code stringlengths 114 1.05M | path stringlengths 3 312 | quality_prob float64 0.5 0.99 | learning_prob float64 0.2 1 | filename stringlengths 3 168 | kind stringclasses 1
value |
|---|---|---|---|---|---|
from enum import Enum, auto
from typing import TYPE_CHECKING, Union
from ._adc_context import ADCContext
from ._adc_rdp_context import ADCRDPContext
from ._adc_udf_context import ADCUDFContext
from ._cust_inst_context import CustInstContext
from ._cust_inst_rdp_context import CustInstRDPContext
from ._cust_inst_udf_context import CustInstUDFContext
from ._hp_and_cust_inst_context import HPAndCustInstContext
from ._hp_context import HPContext, HPUDFContext, HPRDPContext
from ...content.fundamental_and_reference._data_grid_type import DataGridType
if TYPE_CHECKING:
from .._containers import FieldsContainer, UniverseContainer
class ContextType(Enum):
    """Kinds of data contexts a request can be routed to."""

    ADC = auto()
    HP = auto()
    HPAndCustInst = auto()
    CustInst = auto()
# Registry mapping (ContextType, DataGridType) -> concrete context class.
# Note: HPAndCustInst uses the same class for both data grid flavours.
data_grid_type_by_context_class_by_context_type = {
    ContextType.ADC: {
        DataGridType.UDF: ADCUDFContext,
        DataGridType.RDP: ADCRDPContext,
    },
    ContextType.HP: {
        DataGridType.UDF: HPUDFContext,
        DataGridType.RDP: HPRDPContext,
    },
    ContextType.CustInst: {
        DataGridType.UDF: CustInstUDFContext,
        DataGridType.RDP: CustInstRDPContext,
    },
    ContextType.HPAndCustInst: {
        DataGridType.UDF: HPAndCustInstContext,
        DataGridType.RDP: HPAndCustInstContext,
    },
}
def get_context(
    context_type: ContextType,
    data_grid_type: "DataGridType",
    universe: "UniverseContainer",
    fields: "FieldsContainer",
    use_field_names_in_headers: bool,
) -> Union[CustInstContext, ADCContext, HPContext, HPAndCustInstContext]:
    """Instantiate the context class registered for the given types.

    Parameters
    ----------
    context_type : ContextType
        Which data context family to build.
    data_grid_type : DataGridType
        The data grid flavour (UDF or RDP).
    universe : UniverseContainer
        Instruments the context operates on.
    fields : FieldsContainer
        Fields the context operates on.
    use_field_names_in_headers : bool
        Whether headers should use field names.

    Returns
    -------
    The context instance for the requested combination.

    Raises
    ------
    TypeError
        If no context class is registered for context_type or data_grid_type.
    """
    data_grid_type_by_context_class = data_grid_type_by_context_class_by_context_type.get(context_type)
    if not data_grid_type_by_context_class:
        raise TypeError(f"Unexpected context_type. Type: {context_type}")
    context_class = data_grid_type_by_context_class.get(data_grid_type)
    if not context_class:
        # Name the argument so this error path is distinguishable from the
        # context_type error above.
        raise TypeError(f"Unexpected data_grid_type. Type: {data_grid_type}")
    return context_class(universe, fields, use_field_names_in_headers)
def get_cust_inst_context(
    data_grid_type: "DataGridType",
    universe: "UniverseContainer",
    fields: "FieldsContainer",
    use_field_names_in_headers: bool,
) -> CustInstContext:
    """Convenience wrapper: build a custom-instruments context."""
    return get_context(
        ContextType.CustInst,
        data_grid_type,
        universe,
        fields,
        use_field_names_in_headers,
    )
def get_adc_context(
    data_grid_type: "DataGridType",
    universe: "UniverseContainer",
    fields: "FieldsContainer",
    use_field_names_in_headers: bool,
) -> ADCContext:
    """Convenience wrapper: build an ADC context."""
    return get_context(
        ContextType.ADC,
        data_grid_type,
        universe,
        fields,
        use_field_names_in_headers,
    )
def get_hp_context(
    data_grid_type: "DataGridType",
    universe: "UniverseContainer",
    fields: "FieldsContainer",
    use_field_names_in_headers: bool,
) -> HPContext:
    """Convenience wrapper: build a historical-pricing context."""
    return get_context(
        ContextType.HP,
        data_grid_type,
        universe,
        fields,
        use_field_names_in_headers,
    )
def get_hp_and_custinst_context(
    data_grid_type: "DataGridType",
    universe: "UniverseContainer",
    fields: "FieldsContainer",
    use_field_names_in_headers: bool,
) -> HPAndCustInstContext:
    """Convenience wrapper: build a combined HP + custom-instruments context."""
    return get_context(
        ContextType.HPAndCustInst,
        data_grid_type,
        universe,
        fields,
        use_field_names_in_headers,
    )
from typing import List, Union, Dict
from ._context import UDFMixin
from ._cust_inst_context import CustInstContext
class CustInstUDFContext(UDFMixin, CustInstContext):
    """Custom-instruments context producing UDF-shaped rows and headers."""

    def get_headers(self, headers: dict) -> List[str]:
        """Return header names, rewriting the service's 'DATE' column to 'Date'."""
        return [item["name"].capitalize() if item["name"] == "DATE" else item["name"] for item in headers]

    @staticmethod
    def _template_to_row(template: dict, field_to_idx: dict) -> list:
        """Order the template values into a row at the positions in field_to_idx."""
        row = []
        for field in field_to_idx:
            row.insert(field_to_idx[field], template.get(field))
        return row

    def _parse_dict_to_data(self, raw: dict, field_to_idx: dict) -> List[List[dict]]:
        """Convert one instrument's raw payload into ordered data rows."""
        data = []
        ric = raw["universe"]["ric"]
        headers = self.get_headers(raw["headers"])
        for raw_data_item in raw["data"]:
            template = {"Instrument": ric}
            template.update(dict(zip(headers, raw_data_item)))
            data.append(self._template_to_row(template, field_to_idx))
        return data

    def _parse_list_to_data(self, raw: list, field_to_idx: dict) -> List[List[dict]]:
        """Convert a list of raw payloads, skipping empty items.

        Each list element has the same shape as the single-instrument dict
        response, so parsing is delegated to _parse_dict_to_data.
        """
        data = []
        for raw_item in raw:
            if not raw_item:
                continue
            data.extend(self._parse_dict_to_data(raw_item, field_to_idx))
        return data

    def prepare_data(self, raw, fields) -> List[List[dict]]:
        """Build rows for the raw response(s).

        Rows start with Instrument and Date, followed by the requested fields
        in their given order. Returns None for unexpected raw types.
        """
        field_to_idx = {"Instrument": 0, "Date": 1}
        field_to_idx.update({item: fields.index(item) + 2 for item in fields})
        if isinstance(raw, dict):
            return self._parse_dict_to_data(raw, field_to_idx)
        elif isinstance(raw, list):
            return self._parse_list_to_data(raw, field_to_idx)

    @staticmethod
    def prepare_headers(raw: Union[list, dict]) -> List[List[Dict]]:
        """Build UDF-style headers: Instrument, Date, then one entry per item in raw."""
        headers = [{"displayName": "Instrument"}, {"displayName": "Date"}]
        for item in raw:
            headers.append({"displayName": item, "field": item})
        return [headers]
from typing import List, Dict
from dateutil.parser import parse
from ._context import RDPMixin
from ._cust_inst_context import CustInstContext
class CustInstRDPContext(RDPMixin, CustInstContext):
    """Custom-instruments context producing RDP-shaped rows and headers."""

    @staticmethod
    def get_headers(headers: dict) -> List[str]:
        """Return header names, rewriting the service's 'DATE' column to 'date'."""
        return [item["name"].lower() if item["name"] == "DATE" else item["name"] for item in headers]

    @staticmethod
    def _normalize_value(value):
        """Append a midnight time part to date-like strings.

        Non-date values (including non-strings, which make parse raise
        TypeError) are returned unchanged.
        """
        try:
            parse(value, fuzzy=False)
        except (ValueError, TypeError):
            return value
        return f"{value} 00:00:00"

    def _parse_dict_to_data(self, raw: dict, field_to_idx: dict) -> List[List[dict]]:
        """Convert one instrument's raw payload into ordered data rows."""
        data = []
        ric = raw["universe"]["ric"]
        headers = self.get_headers(raw["headers"])
        for raw_data_item in raw["data"]:
            template = {"instrument": ric}
            for header, value in zip(headers, raw_data_item):
                template[header] = self._normalize_value(value)
            row = []
            for field in field_to_idx:
                row.insert(field_to_idx[field], template.get(field))
            data.append(row)
        return data

    def _parse_list_to_data(self, raw: list, field_to_idx: dict) -> List[List[dict]]:
        """Convert a list of raw payloads, skipping empty items.

        Each list element has the same shape as the single-instrument dict
        response, so parsing is delegated to _parse_dict_to_data.
        """
        data = []
        for raw_item in raw:
            if not raw_item:
                continue
            data.extend(self._parse_dict_to_data(raw_item, field_to_idx))
        return data

    def prepare_data(self, raw, fields) -> list:
        """Build rows for the raw response(s).

        Rows start with instrument and date, followed by the requested fields
        in their given order. Returns None for unexpected raw types.
        """
        field_to_idx = {"instrument": 0, "date": 1}
        field_to_idx.update({field: fields.index(field) + 2 for field in fields})
        if isinstance(raw, dict):
            return self._parse_dict_to_data(raw, field_to_idx)
        elif isinstance(raw, list):
            return self._parse_list_to_data(raw, field_to_idx)

    @staticmethod
    def prepare_headers(raw) -> List[Dict[str, str]]:
        """Build RDP-style headers: instrument, date, then one entry per item in raw."""
        headers = [
            {"name": "instrument", "title": "Instrument"},
            {"name": "date", "title": "Date"},
        ]
        for item in raw:
            headers.append({"name": item, "title": item})
        return headers
import abc
from collections import defaultdict
from typing import Optional, Dict, Tuple, List
from pandas import DataFrame
from ._context import Context, UDFMixin, RDPMixin
from ...content._historical_raw_transf import transform_to_dicts
class HPContext(Context, abc.ABC):
    """Base historical-pricing context shared by the UDF and RDP variants."""

    @property
    def can_get_data(self) -> bool:
        # HP data can be requested when there are HP universe items and either
        # no field filter at all or at least one HP field requested.
        return bool(self.universe.hp and (not self.fields or self.fields.hp))

    @property
    def can_build_df(self) -> bool:
        # A dataframe is built from HP data alone, i.e. only when neither ADC
        # nor custom-instruments data is present.
        return bool(self._hp_data and not (self._adc_data or self._cust_inst_data))

    @property
    def raw(self) -> Optional[Dict]:
        # Short-circuits to None (or another falsy _hp_data) when no HP data.
        return self._hp_data and self._hp_data.raw

    @property
    def df(self) -> Optional[DataFrame]:
        # Short-circuits to None (or another falsy _hp_data) when no HP data.
        return self._hp_data and self._hp_data.df

    @property
    @abc.abstractmethod
    def date_name(self) -> str:
        # for override: name of the date column in the subclass's grid flavour
        pass

    def get_data_fields(self) -> Tuple[Dict[str, List[Dict]], List[str]]:
        """
        Transform the raw HP payloads into per-RIC row dicts.

        Returns a tuple of (dicts keyed by RIC, list of field names). Each
        historical_raw payload looks like:
        {
            'universe': {'ric': 'GS.N'},
            'interval': 'P1D',
            'summaryTimestampLabel': 'endPeriod',
            'adjustments': ['exchangeCorrection', 'manualCorrection', 'CCH', 'CRE', 'RTS', 'RPO'],
            'defaultPricingField': 'TRDPRC_1',
            'qos': {'timeliness': 'delayed'},
            'headers': [{'name': 'DATE', 'type': 'string'}, {'name': 'BID', 'type': 'number', 'decimalChar': '.'}],
            'data': [['2023-04-17', 339.7], ['2023-04-14', 336.88]],
            'meta': {
                'blendingEntry': {
                    'headers': [{'name': 'DATE', 'type': 'string'}, {'name': 'BID', 'type': 'number', 'decimalChar': '.'}],
                    'data': [['2023-04-17', 339.7]]
                }
            }
        }
        """
        dicts_by_ric = defaultdict(list)
        fields = self.fields.hp
        return_new_fields = False
        date_name = self.date_name.casefold()
        for historical_raw in self.raw:
            try:
                if not fields:
                    # No explicit field filter: adopt the field names from the
                    # payload's headers and report them back to the caller.
                    fields = [header["name"] for header in historical_raw["headers"]]
                    return_new_fields = True
                dicts = transform_to_dicts(historical_raw, fields, date_name)
                ric = historical_raw["universe"]["ric"]
            except Exception:
                # Best-effort: a malformed payload yields one placeholder row
                # under the None key. NOTE(review): this also resets `fields`
                # to None, so `list(fields)` below would raise if
                # return_new_fields was already True from an earlier payload —
                # presumably payloads are homogeneous; confirm.
                fields = None
                ric = None
                dicts = [{date_name: None}]
            dicts_by_ric[ric].extend(dicts)
        return (
            dicts_by_ric,
            list(fields) if return_new_fields else list(self.fields),
        )
class HPUDFContext(UDFMixin, HPContext):
    """Historical-pricing context flavored for the UDF data grid."""

    pass
class HPRDPContext(RDPMixin, HPContext):
    """Historical-pricing context flavored for the RDP data grid."""
from typing import Optional, Union
import pandas as pd
from ..._types import OptDateTime
from ...content.news import story as _story
from ...content.news import headlines as _headlines
from ...content.news.headlines._sort_order import SortOrder
from enum import unique
from ..._base_enum import StrEnum
@unique
class Format(StrEnum):
    """News story response formats."""

    TEXT = "Text"
    HTML = "Html"
def get_story(
    story_id: str,
    format: Optional[Union[Format, str]] = Format.HTML,
) -> str:
    """
    Retrieves a news story item.

    Parameters
    ----------
    story_id : str
        News Story ID.
    format : str, Format, optional
        Response format; HTML by default.

    Returns
    -------
    str
        The story content as HTML or plain text.

    Examples
    --------
    >>> import refinitiv.data as rd
    >>> response = rd.news.get_story("urn:newsml:reuters.com:20220713:nL1N2YU10J", format=rd.news.Format.TEXT)
    """
    response = _story.Definition(story_id).get_data()
    content = response.data.story.content
    if format == Format.HTML:
        return content.html
    return content.text
def get_headlines(
    query: str,
    count: int = 10,
    start: "OptDateTime" = None,
    end: "OptDateTime" = None,
    order_by: Union[str, SortOrder] = SortOrder.new_to_old,
) -> pd.DataFrame:
    """
    Retrieves news headlines.

    Parameters
    ----------
    query: str
        The user search query for news headlines.
    count: int, optional
        Limits the number of headlines returned.
    start: str or timedelta, optional
        Beginning of the date range.
        String format is '%Y-%m-%dT%H:%M:%S', e.g. '2016-01-20T15:04:05'.
    end: str or timedelta, optional
        End of the date range.
        String format is '%Y-%m-%dT%H:%M:%S', e.g. '2016-01-20T15:04:05'.
    order_by: str or SortOrder
        Sort order for the headline items.

    Returns
    -------
    pd.DataFrame
        Headlines dataframe.

    Examples
    --------
    >>> from datetime import timedelta
    >>> import refinitiv.data as rd
    >>> response = rd.news.get_headlines(
    ...     "Refinitiv",
    ...     start="20.03.2021",
    ...     end=timedelta(days=-4),
    ...     count=3
    ... )
    """
    response = _headlines.Definition(
        query=query,
        count=count,
        date_from=start,
        date_to=end,
        sort_order=order_by,
    ).get_data()
    return response.data.df
__all__ = ("Definition", "GrantPassword")
from typing import Union as _Union
from .._core.session.grant_password import GrantPassword
class Definition(object):
    """
    Platform session.

    Can be defined indirectly using the name of a session defined
    in the configuration file or directly by specifying the other Definition parameters.

    Parameters
    ----------
    name: str, default "default"
        Session name
    app_key: str, optional
        Application key
    grant: GrantPassword object, optional
        Grants objects containing the credentials used to authenticate the user
        (or the machine) when connecting to the data platform.
        Several kind of grant objects can be used GrantPassword is the most common one.
    signon_control: bool, default True
        Controls the exclusive sign-on behavior when the user account
        (or computer account) for this session is concurrently used by another
        application. When this parameter is set to True, opening this session
        automatically disconnects the other application. When this parameter is set to
        False, the opening of this session fails preserving the other application.
    deployed_platform_host: str, optional
        Host name (or IP) and port to be used to connect to Real-Time Distribution
        System.
    deployed_platform_username: str, optional
        DACS username identifying the user when to connect
        to a Real-Time Distribution System
    dacs_position: str, optional
        DACS position identifying the terminal when connecting to a Real-Time
        Distribution System.
    dacs_application_id: str, optional
        Must contain the user's Data Access Control System application ID.
        For more information, refer to the DACS documentation on my.refinitiv.com
    proxies: str or dict, optional
        Proxies configuration. If string, should be the URL of the proxy
        (e.g. 'https://proxy.com:8080'). If a dict, the keys are the protocol
        name (e.g. 'http', 'https') and the values are the proxy URLs.

    Raises
    ----------
    Exception
        If app-key is not found in the config file and in arguments.
    ValueError
        If name is not a string.

    Examples
    --------
    >>> import refinitiv.data as rd
    >>> definition = rd.session.platform.Definition(name="custom-session-name")
    >>> platform_session = definition.get_session()
    """

    def __init__(
        self,
        name: str = "default",
        app_key: str = None,
        grant: GrantPassword = None,
        signon_control: bool = True,
        deployed_platform_host: str = None,
        deployed_platform_username: str = None,
        dacs_position: str = None,
        dacs_application_id: str = None,
        proxies: _Union[str, dict] = None,
    ) -> None:
        # Validate the input before doing any other work, so bad input fails
        # fast with the intended ValueError rather than after (or because of)
        # the provider-module import below.
        if not isinstance(name, str):
            raise ValueError("Invalid session name type, please provide string.")

        # Imported lazily to avoid a circular import at module load time.
        from .._core.session._session_provider import _make_platform_session_provider_by_arguments

        self._create_session = _make_platform_session_provider_by_arguments(
            session_name=name,
            app_key=app_key,
            signon_control=signon_control,
            deployed_platform_host=deployed_platform_host,
            deployed_platform_username=deployed_platform_username,
            dacs_position=dacs_position,
            dacs_application_id=dacs_application_id,
            grant=grant,
            proxies=proxies,
        )

    def get_session(self):
        """
        Creates and returns the session.

        Returns
        -------
        The platform session instance.
        """
        session = self._create_session()
        return session
from typing import TYPE_CHECKING, Optional
from .omm_stream import OMMStream
from ..._tools import create_repr, fields_arg_parser, try_copy_to_list
if TYPE_CHECKING:
from ..._types import ExtendedParams, OptStrStrs
from ..._core.session import Session
class Definition:
    """
    Subscribes to streaming items of any Domain Model
    (e.g. MarketPrice, MarketByPrice, ...) exposed by the underlying
    Refinitiv Data platform.

    Parameters
    ----------
    name : str, optional
        Streaming instrument name.
    api: str, optional
        Streaming data source.
    service : str, optional
        Third-party service URL to manage the streaming data.
    fields : str or list, optional
        Single field or list of fields to return.
    domain : str, optional
        Specific streaming data domain.
    extended_params : dict, optional
        Parameters that will be merged with the request.

    Examples
    --------
    >>> from refinitiv.data.delivery import omm_stream
    >>> definition = omm_stream.Definition("EUR")
    """

    def __init__(
        self,
        name: str,
        api: Optional[str] = None,
        service: Optional[str] = None,
        fields: "OptStrStrs" = None,
        domain: str = "MarketPrice",
        extended_params: "ExtendedParams" = None,
    ) -> None:
        self._name = name
        self._api = api
        self._domain = domain
        self._service = service
        requested_fields = try_copy_to_list(fields)
        # Falsy (None / empty) field inputs are kept as-is; otherwise parse
        # the argument into a list of field names.
        self._fields = requested_fields and fields_arg_parser.get_list(requested_fields)
        self._extended_params = extended_params

    def __repr__(self):
        return create_repr(
            self,
            middle_path="omm_stream",
            content=f"{{name='{self._name}'}}",
        )

    def get_stream(self, session: "Session" = None) -> OMMStream:
        """
        Returns the previously defined data stream from the Refinitiv Data Platform.

        Parameters
        ----------
        session: Session, optional
            Session object. If it's not passed the default session will be used.

        Returns
        -------
        OMMStream instance

        Examples
        --------
        >>> from refinitiv.data.delivery import omm_stream
        >>> definition = omm_stream.Definition("EUR")
        >>> stream = definition.get_stream()
        """
        return OMMStream(
            session=session,
            name=self._name,
            api=self._api,
            service=self._service,
            fields=self._fields,
            domain=self._domain,
            extended_params=self._extended_params,
        )
from typing import Any, Callable, Optional, Union, TYPE_CHECKING
from ._omm_stream import _OMMStream
from ._stream_factory import create_omm_stream
from .base_stream import StreamOpenWithUpdatesMixin
from ..._content_type import ContentType # noqa
from ..._core.session import get_valid_session
from ..._tools import cached_property, create_repr, make_callback
if TYPE_CHECKING:
from ... import OpenState
from ..._core.session import Session
from .contrib import ContribResponse, ContribType
from ..._types import ExtendedParams, OptStr, Strings
class OMMStream(StreamOpenWithUpdatesMixin):
    """
    Open an OMM stream.

    Parameters
    ----------
    session: Session
        The Session defines the source where you want to retrieve your data.
    name: string
        RIC to retrieve item stream.
    api: string, optional
        Specific name of RDP streaming defined in config file,
        i.e. 'streaming.trading-analytics.endpoints.redi'.
    domain: string
        Item stream domain (MarketPrice, MarketByPrice, ...).
        Default: "MarketPrice"
    service: string, optional
        The service to subscribe on. Default: None
    fields: string or list, optional
        The fields to retrieve. Default: None
    extended_params: dict, optional
        Optional params. Default: None

    Raises
    ------
    Exception
        If request fails or if Refinitiv Services return an error

    Examples
    --------
    >>> import refinitiv.data as rd
    >>> session = rd.session.desktop.Definition(app_key="app_key").get_session()
    >>> session.open()
    >>>
    >>> euro = rd.delivery.omm_stream.Definition("EUR=").get_stream(session)
    >>> euro.open()
    >>>
    >>> def on_update_callback(stream, msg):
    ...     print(msg)
    >>>
    >>> thb = rd.delivery.omm_stream.Definition("THB=").get_stream(session)
    >>> thb.on_update(on_update_callback)
    >>> thb.open()
    """

    def __init__(
        self,
        name: str,
        session: Optional["Session"] = None,
        api: "OptStr" = None,
        domain: str = "MarketPrice",
        service: "OptStr" = None,
        fields: Optional["Strings"] = None,
        extended_params: "ExtendedParams" = None,
    ) -> None:
        # Remember whether the caller relies on the default session before
        # resolving a concrete session object.
        self._always_use_default_session = session is None
        self._session = get_valid_session(session)
        self._name = name
        self._api = api
        self._domain = domain
        self._service = service
        self._fields = fields
        self._extended_params = extended_params

    @cached_property
    def _stream(self) -> _OMMStream:
        # Built lazily on first access and cached for the object's lifetime.
        return create_omm_stream(
            ContentType.STREAMING_OMM,
            api=self._api,
            session=self._session,
            name=self._name,
            domain=self._domain,
            service=self._service,
            fields=self._fields,
            extended_params=self._extended_params,
        )

    @property
    def status(self):
        """Snapshot of the stream's state, stream-state code and message."""
        return {
            "status": self._stream.state,
            "code": self._stream.stream_state,
            "message": self._stream.message_state,
        }

    def open(self, with_updates: bool = True) -> "OpenState":
        """
        Opens the OMMStream to start streaming. Once opened, it can be used
        to retrieve data.

        Parameters
        ----------
        with_updates : bool, optional
            True (default) - data is received continuously.
            False - a single snapshot is received (single Refresh
            'NonStreaming') and the stream closes automatically.

        Returns
        -------
        OpenState
            Current state of this OMM stream object.

        Examples
        --------
        >>> from refinitiv.data.delivery import omm_stream
        >>> stream = omm_stream.Definition("EUR").get_stream()
        >>> stream.open()
        """
        return super().open(with_updates=with_updates)

    async def open_async(self, with_updates: bool = True) -> "OpenState":
        """
        Opens the OMMStream asynchronously to start streaming.

        Parameters
        ----------
        with_updates : bool, optional
            True (default) - data is received continuously.
            False - a single snapshot is received (single Refresh
            'NonStreaming') and the stream closes automatically.

        Returns
        -------
        OpenState
            Current state of this OMM stream object.

        Examples
        --------
        >>> from refinitiv.data.delivery import omm_stream
        >>> stream = omm_stream.Definition("EUR").get_stream()
        >>> await stream.open_async()
        """
        return await super().open_async(with_updates=with_updates)

    def close(self) -> "OpenState":
        """
        Closes the OMMStream connection and releases resources.

        Returns
        -------
        OpenState
            Current state of this OMM stream object.

        Examples
        --------
        >>> from refinitiv.data.delivery import omm_stream
        >>> stream = omm_stream.Definition("EUR").get_stream()
        >>> stream.open()
        >>> stream.close()
        """
        return super().close()

    def on_refresh(self, func: Callable[[dict, "OMMStream"], Any]) -> "OMMStream":
        """
        Registers a callback fired when the stream is opened or the record
        is refreshed with a new image; the callback receives a full image.

        Parameters
        ----------
        func : Callable, optional
            Callable object to process retrieved refresh data.

        Returns
        -------
        OMMStream
            This OMM stream object.

        Examples
        --------
        Prerequisite: The default session must be opened.
        >>> from refinitiv.data.delivery import omm_stream
        >>> stream = omm_stream.Definition("EUR").get_stream()
        >>> stream.on_refresh(lambda event, stream: print(event))
        >>> stream.open()
        """
        wrapped = make_callback(func)
        self._stream.on_refresh(wrapped)
        return self

    def on_update(self, func: Callable[[dict, "OMMStream"], Any]) -> "OMMStream":
        """
        Registers a callback fired when an update is received.

        Parameters
        ----------
        func : Callable, optional
            Callable object to process retrieved update data.

        Returns
        -------
        OMMStream
            This OMM stream object.

        Examples
        --------
        Prerequisite: The default session must be opened.
        >>> from refinitiv.data.delivery import omm_stream
        >>> stream = omm_stream.Definition("EUR").get_stream()
        >>> stream.on_update(lambda event, stream: print(event))
        >>> stream.open()
        """
        wrapped = make_callback(func)
        self._stream.on_update(wrapped)
        return self

    def on_status(self, func: Callable[[dict, "OMMStream"], Any]) -> "OMMStream":
        """
        Registers a callback fired when the status of one of the requested
        instruments changes.

        Parameters
        ----------
        func : Callable, optional
            Callable object to process retrieved status data.

        Returns
        -------
        OMMStream
            This OMM stream object.

        Examples
        --------
        Prerequisite: The default session must be opened.
        >>> from refinitiv.data.delivery import omm_stream
        >>> stream = omm_stream.Definition("EUR").get_stream()
        >>> stream.on_status(lambda event, stream: print(event))
        >>> stream.open()
        """
        wrapped = make_callback(func)
        self._stream.on_status(wrapped)
        return self

    def on_complete(self, func: Callable[[dict, "OMMStream"], Any]) -> "OMMStream":
        """
        Registers a callback fired on the complete event.

        Parameters
        ----------
        func : Callable, optional
            Callable object to process when retrieved on complete data.

        Returns
        -------
        OMMStream
            This OMM stream object.

        Examples
        --------
        Prerequisite: The default session must be opened.
        >>> from refinitiv.data.delivery import omm_stream
        >>> stream = omm_stream.Definition("EUR").get_stream()
        >>> stream.on_complete(lambda event, stream: print(event))
        >>> stream.open()
        """
        wrapped = make_callback(func)
        self._stream.on_complete(wrapped)
        return self

    def on_error(self, func: Callable[[dict, "OMMStream"], Any]) -> "OMMStream":
        """
        Registers a callback fired when an error occurs.

        Parameters
        ----------
        func : Callable, optional
            Callable object to process when retrieved error data.

        Returns
        -------
        OMMStream
            This OMM stream object.

        Examples
        --------
        Prerequisite: The default session must be opened.
        >>> from refinitiv.data.delivery import omm_stream
        >>> stream = omm_stream.Definition("EUR").get_stream()
        >>> stream.on_error(lambda event, stream: print(event))
        >>> stream.open()
        """
        wrapped = make_callback(func)
        self._stream.on_error(wrapped)
        return self

    def on_ack(self, on_ack: Callable[[dict, "OMMStream"], Any]) -> "OMMStream":
        """
        Registers a callback fired when the stream receives an ack message
        after sending a contribution.

        Parameters
        ----------
        on_ack : Callable, optional
            Callable object to process retrieved ack data.

        Returns
        -------
        OMMStream
            This OMM stream object.

        Examples
        --------
        Prerequisite: The default session must be opened.
        >>> from refinitiv.data.delivery import omm_stream
        >>> stream = omm_stream.Definition("EUR").get_stream()
        >>> stream.on_ack(lambda event, stream: print(event))
        >>> stream.open()
        >>> stream.contribute({"ASK": 123, "BID": 125})
        """
        wrapped = make_callback(on_ack)
        self._stream.on_ack(wrapped)
        return self

    def __repr__(self):
        return create_repr(
            self,
            middle_path="omm_stream",
            class_name=type(self).__name__,
        )

    def contribute(
        self,
        fields: dict,
        contrib_type: Union[str, "ContribType", None] = None,
        post_user_info: Optional[dict] = None,
    ) -> "ContribResponse":
        """
        Sends an OnStream contribution request.

        Parameters
        ----------
        fields: dict{field:value}
            Fields and values to contribute.
        contrib_type: Union[str, ContribType], optional
            The contribution type. Default: "Update"
        post_user_info: dict, optional
            PostUserInfo object representing information about the posting user.
            Address: string, required
                Dotted-decimal string representing the IP Address of the posting user.
            UserID: int, required
                Specifies the ID of the posting user.

        Returns
        -------
        ContribResponse

        Examples
        --------
        Prerequisite: The default session must be opened.
        >>> from refinitiv.data.delivery import omm_stream
        >>> stream = omm_stream.Definition("EUR").get_stream()
        >>> stream.open()
        >>> response = stream.contribute({"ASK": 123, "BID": 125})
        """
        return self._stream.contribute(
            fields,
            contrib_type=contrib_type,
            post_user_info=post_user_info,
        )

    async def contribute_async(
        self,
        fields: dict,
        contrib_type: Union[str, "ContribType", None] = None,
        post_user_info: Optional[dict] = None,
    ) -> "ContribResponse":
        """
        Sends an asynchronous OnStream contribution request.

        Parameters
        ----------
        fields: dict{field:value}
            Fields and values to contribute.
        contrib_type: Union[str, ContribType], optional
            The contribution type. Default: "Update"
        post_user_info: dict, optional
            PostUserInfo object representing information about the posting user.
            Address: string, required
                Dotted-decimal string representing the IP Address of the posting user.
            UserID: int, required
                Specifies the ID of the posting user.

        Returns
        -------
        ContribResponse

        Examples
        --------
        Prerequisite: The default session must be opened.
        >>> from refinitiv.data.delivery import omm_stream
        >>> stream = omm_stream.Definition("EUR").get_stream()
        >>> stream.open()
        >>> response = await stream.contribute_async({"ASK": 123, "BID": 125})
        """
        return await self._stream.contribute_async(
            fields,
            contrib_type=contrib_type,
            post_user_info=post_user_info,
        )
from typing import TYPE_CHECKING, Any
from .rdp_stream import RDPStream
from ..._tools import try_copy_to_list
if TYPE_CHECKING:
from ..._types import ExtendedParams, StrStrings
from ..._core.session import Session
class Definition:
    """
    Defines the data to retrieve using an RDP data stream from the
    Refinitiv Data Platform.

    Parameters
    ----------
    service: string, optional
        RDP service name.
    universe: list
        Single instrument or list of instruments.
    view: list
        Data fields that should be retrieved from the data stream.
    parameters: dict
        Extra parameters to retrieve from the item stream.
    api: string
        RDP streaming data source.
    extended_params: dict, optional
        Parameters that will be merged with the request.

    Examples
    --------
    >>> from refinitiv.data.delivery import rdp_stream
    >>> definition = rdp_stream.Definition(
    ...     service=None,
    ...     universe=[],
    ...     view=None,
    ...     parameters={"universeType": "RIC"},
    ...     api="streaming.trading-analytics.endpoints.redi",
    ...)
    """

    def __init__(
        self,
        service: str,
        universe: Any,
        view: "StrStrings",
        parameters: dict,
        api: str,
        extended_params: "ExtendedParams" = None,
    ) -> None:
        self._service = service
        # A dict universe is taken as-is; anything else is normalized to a list.
        self._universe = universe if isinstance(universe, dict) else try_copy_to_list(universe)
        self._view = try_copy_to_list(view)
        self._parameters = parameters
        self._api = api
        self._extended_params = extended_params

    def get_stream(self, session: "Session" = None) -> RDPStream:
        """
        Returns the previously defined RDP data stream from the Refinitiv Data Platform.

        Parameters
        ----------
        session : Session
            Session object. If it's not passed the default session will be used.

        Returns
        -------
        RDPStream instance.
        """
        return RDPStream(
            session=session,
            service=self._service,
            universe=self._universe,
            view=self._view,
            parameters=self._parameters,
            api=self._api,
            extended_params=self._extended_params,
        )
from typing import Any, Callable, TYPE_CHECKING, Optional
from ._rdp_stream import _RDPStream
from ._stream_factory import create_rdp_stream
from .base_stream import StreamOpenMixin
from ..._content_type import ContentType
from ..._core.session import get_valid_session
from ..._tools import cached_property, create_repr, make_callback
if TYPE_CHECKING:
from ..._types import ExtendedParams
from ... import OpenState
from ..._core.session import Session
class RDPStream(StreamOpenMixin):
    """
    Open an RDP stream.

    Parameters
    ----------
    service: string, optional
        Name of the RDP service.
    universe: list
        RIC to retrieve item stream.
    view: list
        Data fields to retrieve item stream.
    parameters: dict
        Extra parameters to retrieve item stream.
    api: string
        Specific name of RDP streaming defined in config file. i.e.
        'streaming/trading-analytics/redi'
    extended_params: dict, optional
        Specify optional params.
        Default: None

    Raises
    ------
    Exception
        If request fails or if Refinitiv Services return an error

    Examples
    --------
    >>> import refinitiv.data as rd
    >>> APP_KEY = "APP_KEY"
    >>> USERNAME = "USERNAME"
    >>> PASSWORD = "PASSWORD"
    >>> session = rd.session.platform.Definition(
    ...         app_key=APP_KEY,
    ...         grant=rd.session.platform.GrantPassword(
    ...             username=USERNAME,
    ...             password=PASSWORD,
    ...         )
    ... ).get_session()
    >>> session.open()
    >>>
    >>> tds = rd.delivery.rdp_stream.Definition(
    ...     service="",
    ...     universe=[],
    ...     view=[],
    ...     parameters={"universeType": "RIC"},
    ...     api='streaming.trading-analytics.endpoints.redi'
    ... ).get_stream(session)
    >>> tds.open()
    """

    def __init__(
        self,
        service: str,
        universe: list,
        view: list,
        parameters: dict,
        api: str,
        session: Optional["Session"] = None,
        extended_params: "ExtendedParams" = None,
    ) -> None:
        # Resolve the session now; remember whether the default session
        # should be re-resolved later (no explicit session given).
        self._session = get_valid_session(session)
        self._always_use_default_session = session is None
        self._service = service
        self._universe = universe
        self._view = view
        self._parameters = parameters
        self._api = api
        self._extended_params = extended_params

    @cached_property
    def _stream(self) -> _RDPStream:
        # Underlying low-level stream, created lazily on first access and
        # cached for the lifetime of this object.
        return create_rdp_stream(
            ContentType.STREAMING_RDP,
            api=self._api,
            session=self._session,
            service=self._service,
            universe=self._universe,
            view=self._view,
            parameters=self._parameters,
            extended_params=self._extended_params,
        )

    def open(self) -> "OpenState":
        """
        Opens the RDPStream to start streaming. Once it's opened,
        it can be used in order to retrieve data.

        Returns
        -------
        OpenState
            Current state of this RDP stream object.

        Examples
        --------
        >>> from refinitiv.data.delivery import rdp_stream
        >>> definition = rdp_stream.Definition(
        ...     service=None,
        ...     universe=[],
        ...     view=None,
        ...     parameters={"universeType": "RIC"},
        ...     api='streaming.trading-analytics.endpoints.redi')
        >>> stream = definition.get_stream()
        >>> stream.open()
        """
        return super().open()

    async def open_async(self) -> "OpenState":
        """
        Opens the RDPStream asynchronously to start streaming.

        Returns
        -------
        OpenState
            Current state of this RDP stream object.

        Examples
        --------
        >>> from refinitiv.data.delivery import rdp_stream
        >>> definition = rdp_stream.Definition(
        ...     service=None,
        ...     universe=[],
        ...     view=None,
        ...     parameters={"universeType": "RIC"},
        ...     api='streaming.trading-analytics.endpoints.redi')
        >>> stream = definition.get_stream()
        >>> await stream.open_async()
        """
        return await super().open_async()

    def close(self) -> "OpenState":
        """
        Closes the RDPStream connection, releases resources.

        Returns
        -------
        OpenState
            Current state of this RDP stream object.

        Examples
        --------
        >>> from refinitiv.data.delivery import rdp_stream
        >>> definition = rdp_stream.Definition(
        ...     service=None,
        ...     universe=[],
        ...     view=None,
        ...     parameters={"universeType": "RIC"},
        ...     api='streaming.trading-analytics.endpoints.redi')
        >>> stream = definition.get_stream()
        >>> stream.open()
        >>> stream.close()
        """
        return super().close()

    def on_ack(self, on_ack: Callable[[dict, "RDPStream"], Any]) -> "RDPStream":
        """
        Registers a callback invoked when the stream receives an ack message.

        Parameters
        ----------
        on_ack : Callable, optional
            Callable object to process retrieved ack data

        Returns
        -------
        RDPStream
            This RDP stream object (for chaining).

        Examples
        --------
        Prerequisite: The default session must be opened.
        >>> from datetime import datetime
        >>> from refinitiv.data.delivery import rdp_stream
        >>>
        >>> def display_response(event, stream):
        ...     print(f'Ack received at {datetime.now}')
        ...     print(event)
        >>>
        >>> definition = rdp_stream.Definition(
        ...     service=None,
        ...     universe=[],
        ...     view=None,
        ...     parameters={"universeType": "RIC"},
        ...     api='streaming.trading-analytics.endpoints.redi')
        >>> stream = definition.get_stream()
        >>> stream.on_ack(display_response)
        >>>
        >>> stream.open()
        """
        self._stream.on_ack(make_callback(on_ack))
        return self

    def on_response(self, on_response: Callable[[dict, "RDPStream"], Any]) -> "RDPStream":
        """
        Registers a callback invoked when the stream receives a response message.

        Parameters
        ----------
        on_response : Callable, optional
            Callable object to process retrieved response data

        Returns
        -------
        RDPStream
            This RDP stream object (for chaining).

        Examples
        --------
        Prerequisite: The default session must be opened.
        >>> from datetime import datetime
        >>> from refinitiv.data.delivery import rdp_stream
        >>>
        >>> def display_response(event, stream):
        ...     print(f'Response received at {datetime.now}')
        ...     print(event)
        >>>
        >>> definition = rdp_stream.Definition(
        ...     service=None,
        ...     universe=[],
        ...     view=None,
        ...     parameters={"universeType": "RIC"},
        ...     api='streaming.trading-analytics.endpoints.redi')
        >>> stream = definition.get_stream()
        >>> stream.on_response(display_response)
        >>>
        >>> stream.open()
        """
        self._stream.on_response(make_callback(on_response))
        return self

    def on_update(self, on_update: Callable[[dict, "RDPStream"], Any]) -> "RDPStream":
        """
        Registers a callback invoked when the stream receives an update message.

        Parameters
        ----------
        on_update : Callable, optional
            Callable object to process retrieved update data

        Returns
        -------
        RDPStream
            This RDP stream object (for chaining).

        Examples
        --------
        Prerequisite: The default session must be opened.
        >>> from datetime import datetime
        >>> from refinitiv.data.delivery import rdp_stream
        >>>
        >>> def display_response(event, stream):
        ...     print(f'Update received at {datetime.now}')
        ...     print(event)
        >>>
        >>> definition = rdp_stream.Definition(
        ...     service=None,
        ...     universe=[],
        ...     view=None,
        ...     parameters={"universeType": "RIC"},
        ...     api='streaming.trading-analytics.endpoints.redi')
        >>> stream = definition.get_stream()
        >>> stream.on_update(display_response)
        >>>
        >>> stream.open()
        """
        self._stream.on_update(make_callback(on_update))
        return self

    def on_alarm(self, on_alarm: Callable[[dict, "RDPStream"], Any]) -> "RDPStream":
        """
        Registers a callback invoked when the stream receives an alarm message.

        Parameters
        ----------
        on_alarm : Callable, optional
            Callable object to process retrieved alarm data

        Returns
        -------
        RDPStream
            This RDP stream object (for chaining).

        Examples
        --------
        Prerequisite: The default session must be opened.
        >>> from datetime import datetime
        >>> from refinitiv.data.delivery import rdp_stream
        >>>
        >>> def display_response(event, stream):
        ...     print(f'Alarm received at {datetime.now}')
        ...     print(event)
        >>>
        >>> definition = rdp_stream.Definition(
        ...     service=None,
        ...     universe=[],
        ...     view=None,
        ...     parameters={"universeType": "RIC"},
        ...     api='streaming.trading-analytics.endpoints.redi')
        >>> stream = definition.get_stream()
        >>> stream.on_alarm(display_response)
        >>>
        >>> stream.open()
        """
        self._stream.on_alarm(make_callback(on_alarm))
        return self

    def __repr__(self):
        return create_repr(
            self,
            middle_path="rdp_stream",
            class_name=self.__class__.__name__,
        )
import abc
from typing import Union
class ContribResponse(abc.ABC):
    """
    Base class for contribution responses.

    Wraps the raw response message (a dict) and exposes its common
    fields; concrete subclasses decide what counts as success and where
    the error/nak text comes from.
    """

    def __init__(self, message: dict):
        self._message = message

    def _field(self, key: str, default=None):
        # Single lookup point into the raw message payload.
        return self._message.get(key, default)

    @property
    @abc.abstractmethod
    def is_success(self) -> bool:
        """Whether the contribution was accepted."""

    @property
    def type(self) -> str:
        return self._field("Type", "")

    @property
    def ack_id(self) -> Union[int, None]:
        # No default: absence of AckID is reported as None.
        return self._field("AckID")

    @property
    def nak_code(self) -> str:
        return self._field("NakCode", "")

    @property
    @abc.abstractmethod
    def nak_message(self) -> str:
        """Negative-acknowledge text, if any."""

    @property
    @abc.abstractmethod
    def error(self) -> str:
        """Error text, if any."""

    @property
    def debug(self) -> dict:
        return self._field("Debug", {})

    def __str__(self) -> str:
        return str(self._message)
class RejectedContribResponse(ContribResponse):
    """Response for a contribution rejected before reaching the server."""

    @property
    def is_success(self) -> bool:
        return False

    @property
    def type(self) -> str:
        # Rejections are always reported as errors regardless of payload.
        return "Error"

    @property
    def error(self) -> str:
        # BUG FIX: default to "" — previously a missing "Text" key returned
        # None, violating the declared -> str and the sibling classes'
        # convention of empty-string defaults.
        return self._message.get("Text", "")

    @property
    def nak_message(self) -> str:
        return ""

    @property
    def debug(self) -> dict:
        return {}
class AckContribResponse(ContribResponse):
    """Response built from an Ack message; a NakCode marks a rejection."""

    def __repr__(self) -> str:
        summary = {"Type": self.type, "AckId": self.ack_id}
        code = self.nak_code
        if code:
            # Only negative acks carry a code and message worth showing.
            summary["NakCode"] = code
            summary["Message"] = self.nak_message
        return str(summary)

    @property
    def is_success(self) -> bool:
        # Success unless the server attached a negative-ack code.
        return not self.nak_code

    @property
    def nak_message(self) -> str:
        return self._message.get("Text", "")

    @property
    def error(self) -> str:
        return ""
class ErrorContribResponse(ContribResponse):
    """Response built from a stream-level Error message."""

    def __repr__(self) -> str:
        # BUG FIX: show the actual error text. The previous code used
        # self.nak_message, which this class hardwires to "", so the
        # "Text" entry in the repr was always empty.
        return str(
            {
                "Type": self.type,
                "Text": self.error,
                "Debug": self.debug,
            }
        )

    @property
    def is_success(self) -> bool:
        return False

    @property
    def nak_message(self) -> str:
        return ""

    @property
    def error(self) -> str:
        return self._message.get("Text", "")
class NullContribResponse(ContribResponse):
    """
    Placeholder response returned when no contribution actually took
    place (e.g. the stream could not be opened). Every accessor yields
    an empty value and is_success is always False.
    """

    def __init__(self):
        # There is no underlying message for a null response.
        super().__init__(None)

    @property
    def is_success(self) -> bool:
        return False

    @property
    def error(self) -> str:
        return ""

    @property
    def nak_message(self) -> str:
        return ""

    @property
    def nak_code(self) -> str:
        return ""

    @property
    def ack_id(self) -> Union[int, None]:
        return None

    @property
    def type(self) -> str:
        return ""

    @property
    def debug(self) -> dict:
        return {}
from typing import Union, TYPE_CHECKING
from ._response import ContribResponse, NullContribResponse
from ...._core.session import get_valid_session
from ...._tools import make_callback
if TYPE_CHECKING:
from ._type import ContribType
from ...._core.session import Session
from ...._types import OptStr, OptCall
def contribute(
    name: str,
    fields: dict,
    *,
    service: "OptStr" = None,
    contrib_type: Union[str, "ContribType", None] = None,
    session: "Session" = None,
    api: "OptStr" = None,
    on_ack: "OptCall" = None,
    on_error: "OptCall" = None,
) -> "ContribResponse":
    """
    Send an off-stream contribution request.

    Parameters
    ----------
    name: string
        RIC to retrieve instrument stream.
    fields: dict{field:value}
        Specify fields and values to contribute.
    service: string, optional
        Specify the service to contribute on.
        Default: None
    contrib_type: Union[str, ContribType], optional
        Define the contribution type : ["Refresh", "Update"]
        Default: "Update"
    session: Session, optional
        Specify the session used to contribute
    api: string, optional
        Specific name of contrib streaming defined in config file.
        i.e. 'streaming.contrib.endpoints.my_server'
        Default: 'streaming.contrib.endpoints.main'
    on_ack : function, optional
        Callback function for on_ack event to check contribution result
    on_error : function, optional
        Callback function for on_error event

    Returns
    ----------
    ContribResponse

    Examples
    --------
    Prerequisite: The contrib_session must be opened
    >>> import refinitiv.data as rd
    >>> def on_ack_callback(ack_msg, stream):
    ...     print("Receive Ack response:", ack_msg)
    >>> def on_error_callback(error_msg, stream):
    ...     print("Receive Error:", error_msg)
    >>> update = {
    ...     "ASK": 1.23,
    ...     "BID": 1.24
    ... }
    >>> response = rd.delivery.omm_stream.contribute(
    ...     name="EUR=",
    ...     fields=update,
    ...     service="SVC_CONTRIB",
    ...     on_ack=on_ack_callback,
    ...     on_error=on_error_callback
    ... )
    """
    from ..._stream._stream_factory import create_offstream_contrib

    offstream = create_offstream_contrib(
        session=get_valid_session(session),
        name=name,
        api=api,
        domain="MarketPrice",
        service=service,
    )
    if on_ack:
        offstream.on_ack(make_callback(on_ack))
    if on_error:
        offstream.on_error(make_callback(on_error))
    try:
        offstream.open()
    except ConnectionError:
        # Opening failed: report via the error callback (if any) and
        # return a null response instead of propagating the exception.
        if on_error:
            on_error(offstream.get_contrib_error_message(), offstream)
        return NullContribResponse()
    response = offstream.contribute(fields, contrib_type)
    offstream.close()
    return response
async def contribute_async(
    name: str,
    fields: dict,
    *,
    service: "OptStr" = None,
    contrib_type: Union[str, "ContribType", None] = None,
    session: "Session" = None,
    api: "OptStr" = None,
    on_ack: "OptCall" = None,
    on_error: "OptCall" = None,
) -> "ContribResponse":
    """
    Send an asynchronous off-stream contribution request.

    Parameters
    ----------
    name: string
        RIC to retrieve instrument stream.
    fields: dict{field:value}
        Specify fields and values to contribute.
    service: string, optional
        Specify the service to contribute on.
        Default: None
    contrib_type: Union[str, ContribType], optional
        Define the contribution type
        Default: "Update"
    session: Session, optional
        Specify the session used to contribute
    api: string, optional
        Specific name of contrib streaming defined in config file.
        i.e. 'streaming.contrib.endpoints.my_server'
        Default: 'streaming.contrib.endpoints.main'
    on_ack : function, optional
        Callback function for on_ack event to check contribution result
    on_error : function, optional
        Callback function for on_error event

    Returns
    ----------
    ContribResponse

    Examples
    --------
    Prerequisite: The contrib_session must be opened.
    >>> import refinitiv.data as rd
    >>> def on_ack_callback(ack_msg, stream):
    ...     print("Receive Ack response:", ack_msg)
    >>> def on_error_callback(error_msg, stream):
    ...     print("Receive Error:", error_msg)
    >>> update = {
    ...     "ASK": 1.23,
    ...     "BID": 1.24
    ... }
    >>> response = await rd.delivery.omm_stream.contribute_async(
    ...     "EUR=",
    ...     fields=update,
    ...     service="SVC_CONTRIB",
    ...     on_ack=on_ack_callback,
    ...     on_error=on_error_callback
    ... )
    """
    from ..._stream._stream_factory import create_offstream_contrib

    offstream = create_offstream_contrib(
        session=get_valid_session(session),
        name=name,
        api=api,
        domain="MarketPrice",
        service=service,
    )
    if on_ack:
        offstream.on_ack(make_callback(on_ack))
    if on_error:
        offstream.on_error(make_callback(on_error))
    try:
        await offstream.open_async()
    except ConnectionError:
        # Opening failed: report via the error callback (if any) and
        # return a null response instead of propagating the exception.
        if on_error:
            on_error(offstream.get_contrib_error_message(), offstream)
        return NullContribResponse()
    response = await offstream.contribute_async(fields, contrib_type)
    offstream.close()
    return response
import datetime
import time
import warnings
from typing import Any, Union, TYPE_CHECKING, Dict, Callable
from dateutil import parser
from refinitiv.data.delivery._stream._validator_exceptions import ValidationException, ValidationsException
from ._types import FieldType, RWFDataType
if TYPE_CHECKING:
from ._field_description import FieldDescription
from ._dictionary import Dictionary
# Message templates used in validation errors and logs.
# Placeholders: {0}=value, {1}=field name, {2}=field type, {3}=details.
INVALID_VALUE_ERROR_LOG_PATTERN = "Invalid value {0} for field {1}. Field type is {2}. {3}"
# Placeholders: {0}=field name, {1}=truncated value, {2}=original value, {3}=expected length.
STRING_CUT_OFF_LOG_PATTERN = (
    "{0} value has been cut off to {1}, original value is {2}, the expected field's length is {3}"
)
# Placeholders: {0}=value, {1}=field name, {2}=reason.
INVALID_ENUM_ERROR_LOG_PATTERN = "Invalid enum value {0} for field {1}: {2}"
# Number of seconds in a day; integer TIME values are expected below this.
ONE_DAY_SECS = 86400
class Validator:
    """Validates and normalizes contributed field values against the metadata dictionary."""

    @staticmethod
    def get_validated_fields_values(dic: "Dictionary", fields: dict) -> dict:
        """
        Validate a {name_or_fid: value} mapping against the dictionary.

        Returns a dict keyed by canonical field name with normalized values.
        Raises ValidationsException carrying the per-field errors (and the
        values gathered so far) when any field fails validation.
        """
        validated_fields_values = {}
        errors = {}
        for key, value in fields.items():
            field_desc = dic.get_field(key)
            validated = value
            if field_desc is None:
                errors[key] = f"Field {key} cannot be found in metadata"
                continue
            field_desc_name = field_desc.name
            if field_desc_name in validated_fields_values:
                # Same field referenced twice (e.g. once by name, once by fid).
                errors[key] = f"Field {key} already exists in the fields as {field_desc_name}"
                continue
            rwf_type = field_desc.rwf_type
            rwf_len = field_desc.rwf_len
            value_str = str(value)
            if field_desc.type == FieldType.ENUMERATED:
                try:
                    validated = Validator.validate_enum_value(dic, field_desc, value)
                except ValidationException as e:
                    errors[key] = e.value
            elif rwf_type in {RWFDataType.RWF_RMTES_STRING, RWFDataType.RWF_BUFFER} and len(value_str) > rwf_len:
                # RMTES/buffer values are silently truncated to the field length.
                validated = value_str[:rwf_len]
            else:
                validate_value = mapping.get(rwf_type)
                if not validate_value:
                    errors[key] = INVALID_VALUE_ERROR_LOG_PATTERN.format(
                        value, field_desc_name, rwf_type, "This type is not supported"
                    )
                    continue
                try:
                    validated = validate_value(field_desc, value)
                except ValidationException as e:
                    errors[key] = e.value
            # NOTE(review): when validation of this field failed above, the
            # original (unvalidated) value is still stored here; the
            # ValidationsException below exposes both errors and values.
            validated_fields_values[field_desc_name] = validated
        if errors:
            raise ValidationsException(errors, validated_fields_values)
        return validated_fields_values

    @staticmethod
    def check_enum_field_value(dic: "Dictionary", field_desc: "FieldDescription", value: Union[str, int]):
        """Raise ValidationException if value is not a valid enum display/value for the field."""
        error = None
        key = field_desc.name
        if isinstance(value, str):
            # Try the string as a display label first, then as a numeric value.
            enum_value = dic.get_enum_value(key, value)
            if enum_value is None:
                if value.isdigit():
                    enum_value = dic.get_enum_display(key, int(value))
                    if enum_value is None:
                        error = "invalid enumerated field value"
                else:
                    error = "invalid enumerated field display"
        elif isinstance(value, int):
            enum_value = dic.get_enum_display(key, value)
            if enum_value is None:
                error = "invalid enumerated field display/value"
        else:
            error = "invalid enumerated field display/value"
        if error:
            raise ValidationException(INVALID_ENUM_ERROR_LOG_PATTERN.format(value, field_desc.name, error))

    @staticmethod
    def validate_enum_value(dic: "Dictionary", field_desc: "FieldDescription", value: Union[str, int]):
        """Return the numeric enum value for a display label or numeric input."""
        Validator.check_enum_field_value(dic, field_desc, value)
        if isinstance(value, str) and not value.isdigit():
            validated_value = dic.get_enum_value(field_desc.name, value)
        else:
            validated_value = int(value)
        return validated_value

    @staticmethod
    def validate_string_value(field_desc: "FieldDescription", value: Any) -> str:
        """Coerce value to str, truncate to the field length and require ASCII."""
        validated_value = value
        if not isinstance(value, str):
            validated_value = str(value)
        if len(validated_value) > field_desc.rwf_len:
            # BUG FIX: truncate the stringified value. Slicing the original
            # object (value[:rwf_len]) raised TypeError for non-string
            # inputs such as ints.
            validated_value = validated_value[: field_desc.rwf_len]
        if not validated_value.isascii():
            # isascii() is False, so at least one character is >= 128.
            non_ascii_str = "".join(c for c in validated_value if ord(c) >= 128)
            raise ValidationException(
                INVALID_VALUE_ERROR_LOG_PATTERN.format(
                    validated_value,
                    field_desc.name,
                    "ASCII string",
                    f"It includes non ASCII characters '{non_ascii_str}'",
                )
            )
        return validated_value

    @staticmethod
    def validate_int_value(field_desc: "FieldDescription", value: Union[int, str]) -> int:
        """Return value as int; raise ValidationException for non-integer input."""
        error = None
        validated_value = value
        if isinstance(value, str):
            try:
                validated_value = int(value)
            except Exception as e:
                error = str(e)
        elif not isinstance(value, int):
            error = "It must be integer."
        if error:
            raise ValidationException(INVALID_VALUE_ERROR_LOG_PATTERN.format(value, field_desc.name, "INT64", error))
        return validated_value

    @staticmethod
    def validate_uint_value(field_desc: "FieldDescription", value: Union[int, str]) -> int:
        """Return value as a non-negative int; raise ValidationException otherwise."""
        validated_value = Validator.validate_int_value(field_desc, value)
        if validated_value < 0:
            raise ValidationException(
                INVALID_VALUE_ERROR_LOG_PATTERN.format(
                    validated_value, field_desc.name, "UINT64", "It must be positive integer or 0."
                )
            )
        return validated_value

    @staticmethod
    def validate_real_value(field_desc: "FieldDescription", value: Union[int, float, str]) -> Union[int, float]:
        """Return value as int or float; raise ValidationException for non-numeric input."""
        if isinstance(value, float) or isinstance(value, int):
            return value
        elif isinstance(value, str):
            # Prefer int when the string parses as one, fall back to float.
            for type_value in [int, float]:
                try:
                    value = type_value(value)
                    return value
                except ValueError:
                    pass
        raise ValidationException(
            INVALID_VALUE_ERROR_LOG_PATTERN.format(
                value, field_desc.name, "REAL64", f"field value {value} is not valid for REAL64"
            )
        )

    @staticmethod
    def validate_time_seconds_value(field_desc: "FieldDescription", value: Union[int, datetime.time, str]) -> str:
        """
        Normalize a TIME value (seconds-of-day int, datetime.time or parseable
        string) to an ISO time string; precision depends on the field length.
        """
        valid = True
        error = None
        time_format = "hh:mm:ss.mmm"
        _timespec = "seconds"
        if field_desc.rwf_len == 8:
            _timespec = "milliseconds"
        if isinstance(value, int):
            if value < 0:
                valid = False
                error = f"{field_desc.name} as an integer must be positive."
            elif value >= ONE_DAY_SECS:
                # NOTE(review): out-of-range ints are only warned about and
                # returned unchanged (as int, not str) — confirm intended.
                warnings.warn(
                    f"For TIME field {field_desc.name}, the number means seconds of a day, {value} exceeds 86399"
                )
            else:
                # NOTE(review): time.localtime() applies the local timezone
                # to a seconds-of-day value — confirm intended.
                _time = time.localtime(value)
                value = datetime.time(_time.tm_hour, _time.tm_min, _time.tm_sec).isoformat(timespec=_timespec)
        elif isinstance(value, datetime.time):
            value = value.isoformat(timespec=_timespec)
        elif isinstance(value, str):
            try:
                date_value = parser.parse(value)
                value = date_value.time().isoformat(_timespec)
            except parser.ParserError as e:
                valid = False
                error = str(e)
        else:
            valid = False
        if not valid:
            raise ValidationException(
                INVALID_VALUE_ERROR_LOG_PATTERN.format(
                    value, field_desc.name, f"TIME[{time_format[:field_desc.rwf_len]}]", error
                )
            )
        return value

    @staticmethod
    def validate_date_value(field_desc: "FieldDescription", value: Union[datetime.date, str, int, float]) -> str:
        """
        Normalize a DATE value (datetime.date, parseable string or epoch
        seconds) to an ISO date string.
        """
        valid = True
        error = None
        if isinstance(value, datetime.date):
            value = value.isoformat()
        elif isinstance(value, str):
            try:
                date_value = parser.parse(value)
                value = datetime.date(date_value.year, date_value.month, date_value.day).isoformat()
            except parser.ParserError as e:
                valid = False
                error = str(e)
        elif isinstance(value, float) or isinstance(value, int):
            # BUG FIX: return the ISO string like the other branches do;
            # previously a datetime.date object leaked out of this branch.
            _time = time.localtime(value)
            value = datetime.date(_time.tm_year, _time.tm_mon, _time.tm_mday).isoformat()
        else:
            valid = False
        if not valid:
            raise ValidationException(INVALID_VALUE_ERROR_LOG_PATTERN.format(value, field_desc.name, "DATE", error))
        return value
# Dispatch table: RWF wire type -> validator callable for that type.
# Types absent here (e.g. enumerated, RMTES/buffer) are handled directly
# in Validator.get_validated_fields_values.
mapping: Dict[RWFDataType, Callable[["FieldDescription", Any], Any]] = {
    RWFDataType.RWF_ASCII_STRING: Validator.validate_string_value,
    RWFDataType.RWF_INT64: Validator.validate_int_value,
    RWFDataType.RWF_UINT64: Validator.validate_uint_value,
    RWFDataType.RWF_REAL64: Validator.validate_real_value,
    RWFDataType.RWF_TIME_SECONDS: Validator.validate_time_seconds_value,
    RWFDataType.RWF_DATE: Validator.validate_date_value,
}
# Module-level singleton used by the metadata Dictionary.
validator = Validator()
from typing import Union, List, Dict, TYPE_CHECKING
from ._dictionary_type import DictionaryType
from ._enum_type_entry import EnumTypeEntry, create_enum_entry
from ._field_description import FieldDescription, create_field_description
from ._validator import validator
from .._stream_factory import create_dictionary_stream
from .._validator_exceptions import ValidationException
from ...._content_type import ContentType
from ...._core.log_reporter import PrvLogReporterMixin
from ...._core.session import get_default
from ...._tools import version_to_tuple
if TYPE_CHECKING:
from ._stream import PrvDictionaryStream
from ...._core.session import Session
class Dictionary(PrvLogReporterMixin):
"""
Examples
--------
If user create a session and pass it to Dictionary, then load method will use passed session
>>> import refinitiv.data as rd
>>> session = rd.session.Definition().get_session()
>>> dictionary = rd.delivery._dictionary.Dictionary(session=session)
>>> dictionary.load()
If user create a session and set it default, then load method will use default session
>>> import refinitiv.data as rd
>>> sess_definition = rd.session.Definition()
>>> dictionary = rd.delivery._dictionary.Dictionary()
>>> session_A = sess_definition.get_session()
>>> rd.session.set_default(session_A)
>>> dictionary.load() # will use session_A
>>> session_B = sess_definition.get_session()
>>> rd.session.set_default(session_B)
>>> dictionary.load() # will use session_B
If user create a session and pass it to Dictionary,
then load method will use passed session even if exists default session
>>> import refinitiv.data as rd
>>> sess_definition = rd.session.Definition()
>>> session_A = sess_definition.get_session()
>>> dictionary = rd.delivery._dictionary.Dictionary(session=session_A)
>>> session_B = sess_definition.get_session()
>>> rd.session.set_default(session_B)
>>> dictionary.load() # will use session_A
"""
def __init__(self, session: "Session" = None, service: str = None) -> None:
from . import check_websocket_version
check_websocket_version()
self._session = session
self._session_from_ctor = bool(session)
self._session and self._init_logger(self._session.logger())
self._service = service
self._fid_to_field_desc: Dict[int, FieldDescription] = {}
self._acronym_to_field_desc: Dict[str, FieldDescription] = {}
self._fid_to_enum_type: Dict[int, EnumTypeEntry] = {}
self._dict_type_to_version: Dict[DictionaryType, tuple] = {}
self._rippled_fields = set()
@property
def is_field_dict_available(self) -> bool:
return bool(self._fid_to_field_desc)
@property
def is_enum_dict_available(self) -> bool:
return bool(self._fid_to_enum_type)
@property
def versions(self) -> dict:
"""
retrieve the field and enum type metadata version. e.g: {"RWFFld": (4,20,30), "RWFEnum": (17, 91)}
Returns
-------
dict {"RWFFld": fld_version, "RWFEnum": enum_version}
The first item is field dictionary version and the second item is enum type dictionary version
Examples
--------
>>> from refinitiv.data.delivery._dictionary import Dictionary, DictionaryType
>>> dictionary = Dictionary()
>>> dictionary.load()
>>> dictionary.versions
{"RWFFld": (4,20,30), "RWFEnum": (17, 91)}
>>> dictionary.versions['RWFEnum']
(17, 91)
"""
return self._dict_type_to_version
def get_field(self, key: Union[int, str]) -> Union[FieldDescription, None]:
"""
To retrieve the field's information by name or fid,
including "name", "long_name", "fid", "ripple_to", "field_type", "length", "rwf_type", "rwf_len"
Parameters
----------
key : str | int
If it is string, it is parsed as name; if it is int, it is parsed as fid.
Returns
----------
FieldDescription | None
Return all information for one field defined in metadata or None if all acronyms cannot be found in metadata
Examples
----------
>>> from refinitiv.data.delivery._dictionary import Dictionary
>>> dictionary = Dictionary()
>>> dictionary.load()
>>> keys = ['BID', 4] # 4 stands for RDN_EXCHID
>>> for key in keys:
... field = dictionary.get_field(key)
... print(field)
"""
field_desc = None
# key is a "name"
if isinstance(key, str) and key in self._acronym_to_field_desc:
field_desc = self._acronym_to_field_desc[key]
# key is a "fid"
elif isinstance(key, int) and key in self._fid_to_field_desc:
field_desc = self._fid_to_field_desc[key]
return field_desc
def get_enum_display(self, key: Union[int, str], value: int) -> Union[str, None]:
"""
To retrieve enumerated field's value by enum id.
For example, one of the enumerated type for field RDN_EXCHID "ASE" and its value is 1, it will return the str value "ASE".
Parameters
----------
key : str | int
To identify the field. If it is a string, it is parsed as a name; if it is an int, it is parsed as a fid.
value : int
The value of the enumerated field.
Returns
----------
int | None
Field's value for specified display or None if the field is not enum type or the field is not defined in metadata.
Examples
----------
>>> from refinitiv.data.delivery._dictionary import Dictionary
>>> dictionary = Dictionary()
>>> dictionary.load()
>>> dictionary.get_enum_display("RDN_EXCHID", 1)
"ASE"
>>> dictionary.get_enum_display(4, 1)
"ASE"
"""
field_desc = self.get_field(key)
if field_desc is None:
return None
enum_display = None
if field_desc.enum_length > 0:
enum_type = self._fid_to_enum_type.get(field_desc.fid)
if enum_type and value in enum_type.values:
index = enum_type.values.index(value)
if len(enum_type.displays) > index:
enum_display = enum_type.displays[index]
return enum_display
def get_enum_value(self, key: Union[int, str], display: str) -> Union[int, None]:
"""
To retrieve enumerated field's value by display.
For example, one of the enumerated type for field RDN_EXCHID "ASE" and its value is 1, it will return the int value 1.
Parameters
----------
key : str | int
To identify the field. If it is a string, it is parsed as a name; if it is an int, it is parsed as a fid.
display: str
The value of the enumerated field.
Returns
----------
int | None
Field's value for specified display or None, if the field is not enum type or the field is not defined in metadata.
Examples
----------
>>> from refinitiv.data.delivery._dictionary import Dictionary
>>> dictionary = Dictionary()
>>> dictionary.load()
>>> dictionary.get_enum_value("RDN_EXCHID", "ASE")
1
>>> dictionary.get_enum_value(4, "ASE")
1
"""
field_desc = self.get_field(key)
if field_desc is None:
return None
value = None
if field_desc.enum_length > 0:
enum_type = self._fid_to_enum_type.get(field_desc.fid)
if enum_type and display in enum_type.displays:
index = enum_type.displays.index(display)
if len(enum_type.displays) > index:
value = enum_type.values[index]
return value
def is_valid_enum_field(self, key: Union[str, int], value: Union[str, int]) -> bool:
is_valid = True
try:
validator.check_enum_field_value(key, value)
except ValidationException:
is_valid = False
return is_valid
def validate(self, fields: dict, **kwargs) -> dict:
"""
To check whether several fields' key and value are compliance with the dictionary definition.
If any field is invalid, it will be returned with the error message, also it will be stored in the log.
Parameters
----------
fields: dict
Which key is field name or fid, its value is the value of the field.
kwargs: key1=value1, key2=value2, ......
Returns
----------
dict
The dict of invalid fields and the error message. The key is field name or fid according to the passed
in argument, its value is detailed error message for this field.
Examples
----------
>>> from refinitiv.data.delivery._dictionary import Dictionary
>>> dictionary = Dictionary()
>>> dictionary.load()
>>> dictionary.validate({"ASK":1.1, "BID": 1.2}, ASKSIZE=100, BIDSIZE=110)
{
'ASK': 'Field ASK cannot be found in metadata',
'ASKSIZE': 'Field ASKSIZE cannot be found in metadata',
'BID': 'Field BID cannot be found in metadata',
'BIDSIZE': 110
}
"""
if not self.is_field_dict_available:
raise ValidationException("Metadata not available")
fields.update(kwargs)
return validator.get_validated_fields_values(self, fields)
def is_ripple_to_field(self, field_id: Union[int, str]) -> bool:
if isinstance(field_id, str) and field_id in self._acronym_to_field_desc:
field_id = self._acronym_to_field_desc[field_id]
if isinstance(field_id, int) and field_id in self._fid_to_field_desc:
return field_id in self._rippled_fields
return False
def load(self, dictionary_type: Union[str, DictionaryType] = None, api: str = None) -> None:
"""
Parameters
----------
dictionary_type: DictionaryType, optional
api: str, optional
Specifies the data source. It can be updated/added using config file
Returns
-------
None
Examples
----------
>>> from refinitiv.data.delivery._dictionary import Dictionary, DictionaryType
>>> dictionary = Dictionary()
>>> dictionary.load() # will load all types
>>> dictionary = Dictionary()
>>> dictionary.load(DictionaryType.RWF_FLD) # will load only field data
"""
if not self._session_from_ctor:
self._session = get_default()
self._init_logger(self._session.logger())
load = False
if dictionary_type == DictionaryType.RWF_FLD or not dictionary_type:
load = True
field_stream = create_dictionary_stream(
ContentType.STREAMING_DICTIONARY,
domain="Dictionary",
name=DictionaryType.RWF_FLD,
api=api,
session=self._session,
service=self._service,
on_refresh=self._on_refresh,
)
field_stream.open(with_updates=False)
field_stream.close()
if dictionary_type == DictionaryType.RWF_ENUM or not dictionary_type:
load = True
enum_stream = create_dictionary_stream(
ContentType.STREAMING_DICTIONARY,
domain="Dictionary",
name=DictionaryType.RWF_ENUM,
api=api,
session=self._session,
service=self._service,
on_refresh=self._on_refresh,
)
enum_stream.open(with_updates=False)
enum_stream.close()
if not load:
raise ValueError(f"Nothing to load for {dictionary_type}")
def _on_refresh(self, stream: "PrvDictionaryStream", message: dict):
    """Validate a dictionary refresh message and forward its series payload."""
    # Ignore anything that is not a healthy Dictionary-domain refresh
    # for the stream we actually requested.
    if message.get("Domain") != "Dictionary":
        return
    if message.get("State", {}).get("Data") != "Ok":
        return
    key = message.get("Key", {})
    if not key or key.get("Name") != stream.name:
        return
    self._fill_dictionary(stream.name, message.get("Series"))
def _fill_dictionary(self, dictionary_type: Union[str, DictionaryType], series: dict) -> bool:
    """
    Populate the in-memory dictionaries from a refresh 'Series' payload.

    Returns True when the payload was consumed, False when it was empty,
    unversioned, stale, or carried no entries.
    """
    if not series:
        return False

    version = series.get("Summary", {}).get("Elements", {}).get("Version")
    if not version:
        return False

    known_version = self._dict_type_to_version.get(dictionary_type)
    if known_version is None:
        self._dict_type_to_version[dictionary_type] = version_to_tuple(version)
    elif version_to_tuple(version) <= known_version:
        # Already have this version or newer - nothing to do.
        # NOTE(review): the stored version is never advanced when a newer
        # payload arrives - confirm that is intended.
        return False

    entries = series.get("Entries")
    if not entries:
        return False

    if dictionary_type == DictionaryType.RWF_FLD:
        self._fill_field_dictionary(entries)
    elif dictionary_type == DictionaryType.RWF_ENUM:
        self._fill_enum_type_dictionary(entries)
    return True
def _fill_field_dictionary(self, entries: List[dict]):
    """Register field descriptions parsed from RWF_FLD dictionary entries."""
    for entry in entries:
        if not entry:
            continue
        try:
            field_desc = create_field_description(entry["Elements"])
        except KeyError:
            # Malformed entry - skip it rather than abort the whole load.
            continue
        fid = field_desc.fid
        if fid not in self._fid_to_field_desc:
            # First time we see this FID: index by both FID and acronym.
            self._fid_to_field_desc[fid] = field_desc
            self._acronym_to_field_desc[field_desc.name] = field_desc
            if field_desc.ripple_to != 0:
                self._rippled_fields.add(fid)
def _fill_enum_type_dictionary(self, entries: List[dict]):
    """
    Register enum-type entries parsed from RWF_ENUM dictionary entries.

    Each valid entry maps a list of FIDs to a single enum-type table.
    (Also removes stray non-Python text that had been appended to the
    last line of this method, which broke the module.)
    """
    for entry in entries:
        if not entry:
            continue
        try:
            elements = entry["Elements"]
            fids = elements["FIDS"]["Data"]["Data"]
            enum_type_entry = create_enum_entry(elements["VALUE"]["Data"], elements["DISPLAY"]["Data"])
        except KeyError:
            # Malformed entry - skip it rather than abort the whole load.
            continue
        for fid in fids:
            self._fid_to_enum_type[fid] = enum_type_entry
from itertools import zip_longest
from typing import Generic, List, TYPE_CHECKING, Type, TypeVar, Union
from ._data_factory import BaseDataFactory
from ._endpoint_data import EndpointData, Error
from ._response import Response
if TYPE_CHECKING:
from ._parsed_data import ParsedData
import httpx
TypeResponse = TypeVar("TypeResponse")
def get_closure(response: Union[List["httpx.Response"], "httpx.Response"]) -> Union[str, List[str]]:
    """Extract the 'closure' request header from one response or each of a list."""

    def _one(resp: "httpx.Response") -> str:
        return resp.request.headers.get("closure")

    if isinstance(response, list):
        return [_one(item) for item in response]
    return _one(response)
class BaseResponseFactory(Generic[TypeResponse]):
    """Builds objects of ``response_class`` from parsed HTTP response data."""

    # Concrete subclasses set this to the response type they produce.
    response_class: Type[TypeResponse]

    @staticmethod
    def get_raw(parsed_data: "ParsedData") -> Union[dict, list, str]:
        """Return the raw payload carried by the parsed data."""
        return parsed_data.content_data

    def create_response(self, is_success: bool, parsed_data: "ParsedData", **kwargs) -> TypeResponse:
        """Dispatch to the success or failure builder."""
        builder = self.create_success if is_success else self.create_fail
        return builder(parsed_data, **kwargs)

    def create_success(self, parsed_data: "ParsedData", **kwargs) -> TypeResponse:
        """Build a successful response around the raw payload."""
        return self._do_create_response(True, self.get_raw(parsed_data), parsed_data, **kwargs)

    def create_fail(self, parsed_data: "ParsedData", **kwargs) -> TypeResponse:
        """Build a failed response; falls back to an empty dict when no payload."""
        return self._do_create_response(False, parsed_data.content_data or {}, parsed_data, **kwargs)

    def _do_create_response(
        self,
        is_success: bool,
        raw: Union[dict, list, str],
        parsed_data: "ParsedData",
        **kwargs,
    ) -> TypeResponse:
        """Assemble the response object shared by the success and fail paths."""
        http_response = parsed_data.raw_response
        # Pair up codes/messages even when their counts differ.
        error_pairs = zip_longest(parsed_data.error_codes, parsed_data.error_messages)
        return self.response_class(
            is_success,
            http_response.request,
            http_response,
            http_response.headers,
            http_status=parsed_data.status,
            errors=[Error(code, msg) for code, msg in error_pairs],
            closure=get_closure(http_response),
            requests_count=1,
            _data_factory=self,
            _kwargs=kwargs,
            _raw=raw,
        )
class ResponseFactory(BaseDataFactory[EndpointData], BaseResponseFactory[Response]):
    """
    Default factory producing ``Response`` objects carrying ``EndpointData``.

    (Also removes stray non-Python text that had been appended to the last
    line of this class, which broke the module.)
    """

    def __init__(
        self,
        response_class: Type[Response] = None,
        data_class: Type[EndpointData] = None,
    ):
        # Fall back to the generic Response/EndpointData types when the
        # caller does not customize them.
        self.response_class = response_class or Response
        self.data_class = data_class or EndpointData
from typing import TYPE_CHECKING, Any, Union
from ._api_type import APIType
from ._data_provider import default_data_provider
from ._data_type import DataType
from ._endpoint_data_provider import endpoint_data_provider
from ..cfs._cfs_data_provider import (
cfs_buckets_data_provider,
cfs_file_sets_data_provider,
cfs_files_data_provider,
cfs_packages_data_provider,
cfs_stream_data_provider,
)
from ..._tools._common import urljoin
from ..._content_type import ContentType
from ...content.custom_instruments._custom_instruments_data_provider import (
custom_instrument_data_provider,
custom_instrument_search_data_provider,
custom_instruments_events_data_provider,
custom_instruments_interday_summaries_data_provider,
custom_instruments_intraday_summaries_data_provider,
)
from ...content.esg._esg_data_provider import esg_data_provider
from ...content.estimates._data_provider import estimates_data_provider
from ...content.filings._retrieval_data_provider import filings_retrieval_data_provider
from ...content.filings._search_data_provider import filings_search_data_provider
from ...content.fundamental_and_reference._data_provider import data_grid_rdp_data_provider, data_grid_udf_data_provider
from ...content.historical_pricing._historical_pricing_data_provider import (
hp_events_data_provider,
hp_summaries_data_provider,
)
from ...content.ipa._curves._curves_data_provider import (
curves_data_provider,
cross_currency_curves_definitions_data_provider,
cross_currency_curves_definitions_delete_data_provider,
cross_currency_curves_triangulate_definitions_data_provider,
curve_data_provider,
forward_curves_data_provider,
)
from ...content.ipa._surfaces._surfaces_data_provider import surfaces_data_provider, swaption_surfaces_data_provider
from ...content.ipa.dates_and_calendars.add_periods._add_periods_data_provider import add_period_data_provider
from ...content.ipa.dates_and_calendars.holidays._holidays_data_provider import holidays_data_provider
from ...content.ipa.dates_and_calendars.count_periods._count_periods_data_provider import count_periods_data_provider
from ...content.ipa.dates_and_calendars.date_schedule._date_schedule_data_provider import date_schedule_data_provider
from ...content.ipa.dates_and_calendars.is_working_day._is_working_day_data_provider import is_working_day_data_provider
from ...content.ipa.financial_contracts._contracts_data_provider import contracts_data_provider
from ...content.news.story._data_provider import news_story_data_provider_rdp, news_story_data_provider_udf
from ...content.news.headlines._data_provider import news_headlines_data_provider_udf, news_headlines_data_provider_rdp
from ...content.news.online_reports._data_provider import news_online_reports_data_provider
from ...content.news.online_reports.hierarchy._data_provider import news_online_reports_hierarchy_data_provider
from ...content.news.top_news._data_provider import news_top_news_data_provider
from ...content.news.top_news.hierarchy._data_provider import news_top_news_hierarchy_data_provider
from ...content.news.images._data_provider import news_images_data_provider
from ...content.ownership._ownership_data_provider import ownership_data_provider
from ...content.pricing._pricing_content_provider import pricing_data_provider
from ...content.pricing.chain._chains_data_provider import chains_data_provider
from ...content.search._data_provider import search_data_provider, lookup_data_provider, metadata_data_provider
if TYPE_CHECKING:
from ._data_provider import DataProvider
from ..._configure import _RDPConfig
data_provider_by_data_type = {
ContentType.CHAINS: chains_data_provider,
ContentType.CONTRACTS: contracts_data_provider,
ContentType.CUSTOM_INSTRUMENTS_EVENTS: custom_instruments_events_data_provider,
ContentType.CUSTOM_INSTRUMENTS_INSTRUMENTS: custom_instrument_data_provider,
ContentType.CUSTOM_INSTRUMENTS_INTERDAY_SUMMARIES: custom_instruments_interday_summaries_data_provider,
ContentType.CUSTOM_INSTRUMENTS_INTRADAY_SUMMARIES: custom_instruments_intraday_summaries_data_provider,
ContentType.CUSTOM_INSTRUMENTS_SEARCH: custom_instrument_search_data_provider,
ContentType.DATA_GRID_RDP: data_grid_rdp_data_provider,
ContentType.DATA_GRID_UDF: data_grid_udf_data_provider,
ContentType.DEFAULT: default_data_provider,
ContentType.DISCOVERY_LOOKUP: lookup_data_provider,
ContentType.DISCOVERY_METADATA: metadata_data_provider,
ContentType.DISCOVERY_SEARCH: search_data_provider,
ContentType.ESG_BASIC_OVERVIEW: esg_data_provider,
ContentType.ESG_FULL_MEASURES: esg_data_provider,
ContentType.ESG_FULL_SCORES: esg_data_provider,
ContentType.ESG_STANDARD_MEASURES: esg_data_provider,
ContentType.ESG_STANDARD_SCORES: esg_data_provider,
ContentType.ESG_UNIVERSE: esg_data_provider,
ContentType.ESTIMATES_VIEW_ACTUALS_ANNUAL: estimates_data_provider,
ContentType.ESTIMATES_VIEW_ACTUALS_INTERIM: estimates_data_provider,
ContentType.ESTIMATES_VIEW_ACTUALS_KPI_ANNUAL: estimates_data_provider,
ContentType.ESTIMATES_VIEW_ACTUALS_KPI_INTERIM: estimates_data_provider,
ContentType.ESTIMATES_VIEW_SUMMARY_ANNUAL: estimates_data_provider,
ContentType.ESTIMATES_VIEW_SUMMARY_HISTORICAL_SNAPSHOTS_NON_PERIODIC_MEASURES: estimates_data_provider,
ContentType.ESTIMATES_VIEW_SUMMARY_HISTORICAL_SNAPSHOTS_PERIODIC_MEASURES_ANNUAL: estimates_data_provider,
ContentType.ESTIMATES_VIEW_SUMMARY_HISTORICAL_SNAPSHOTS_PERIODIC_MEASURES_INTERIM: estimates_data_provider,
ContentType.ESTIMATES_VIEW_SUMMARY_HISTORICAL_SNAPSHOTS_RECOMMENDATIONS: estimates_data_provider,
ContentType.ESTIMATES_VIEW_SUMMARY_INTERIM: estimates_data_provider,
ContentType.ESTIMATES_VIEW_SUMMARY_KPI_ANNUAL: estimates_data_provider,
ContentType.ESTIMATES_VIEW_SUMMARY_KPI_HISTORICAL_SNAPSHOTS_KPI: estimates_data_provider,
ContentType.ESTIMATES_VIEW_SUMMARY_KPI_INTERIM: estimates_data_provider,
ContentType.ESTIMATES_VIEW_SUMMARY_NON_PERIODIC_MEASURES: estimates_data_provider,
ContentType.ESTIMATES_VIEW_SUMMARY_RECOMMENDATIONS: estimates_data_provider,
ContentType.FILINGS_RETRIEVAL: filings_retrieval_data_provider,
ContentType.FILINGS_SEARCH: filings_search_data_provider,
ContentType.BOND_CURVE: curves_data_provider,
ContentType.FORWARD_CURVE: forward_curves_data_provider,
ContentType.CROSS_CURRENCY_CURVES_CURVES: curve_data_provider,
ContentType.CROSS_CURRENCY_CURVES_DEFINITIONS_CREATE: cross_currency_curves_definitions_data_provider,
ContentType.CROSS_CURRENCY_CURVES_DEFINITIONS_DELETE: cross_currency_curves_definitions_delete_data_provider,
ContentType.CROSS_CURRENCY_CURVES_DEFINITIONS_GET: cross_currency_curves_definitions_data_provider,
ContentType.CROSS_CURRENCY_CURVES_DEFINITIONS_UPDATE: cross_currency_curves_definitions_data_provider,
ContentType.CROSS_CURRENCY_CURVES_DEFINITIONS_SEARCH: curve_data_provider,
ContentType.CROSS_CURRENCY_CURVES_TRIANGULATE_DEFINITIONS_SEARCH: cross_currency_curves_triangulate_definitions_data_provider,
ContentType.HISTORICAL_PRICING_EVENTS: hp_events_data_provider,
ContentType.HISTORICAL_PRICING_INTERDAY_SUMMARIES: hp_summaries_data_provider,
ContentType.HISTORICAL_PRICING_INTRADAY_SUMMARIES: hp_summaries_data_provider,
ContentType.NEWS_HEADLINES_RDP: news_headlines_data_provider_rdp,
ContentType.NEWS_HEADLINES_UDF: news_headlines_data_provider_udf,
ContentType.NEWS_STORY_RDP: news_story_data_provider_rdp,
ContentType.NEWS_STORY_UDF: news_story_data_provider_udf,
ContentType.NEWS_IMAGES: news_images_data_provider,
ContentType.NEWS_TOP_NEWS_HIERARCHY: news_top_news_hierarchy_data_provider,
ContentType.NEWS_TOP_NEWS: news_top_news_data_provider,
ContentType.NEWS_ONLINE_REPORTS: news_online_reports_data_provider,
ContentType.NEWS_ONLINE_REPORTS_HIERARCHY: news_online_reports_hierarchy_data_provider,
ContentType.OWNERSHIP_CONSOLIDATED_BREAKDOWN: ownership_data_provider,
ContentType.OWNERSHIP_CONSOLIDATED_CONCENTRATION: ownership_data_provider,
ContentType.OWNERSHIP_CONSOLIDATED_INVESTORS: ownership_data_provider,
ContentType.OWNERSHIP_CONSOLIDATED_RECENT_ACTIVITY: ownership_data_provider,
ContentType.OWNERSHIP_CONSOLIDATED_SHAREHOLDERS_HISTORY_REPORT: ownership_data_provider,
ContentType.OWNERSHIP_CONSOLIDATED_SHAREHOLDERS_REPORT: ownership_data_provider,
ContentType.OWNERSHIP_CONSOLIDATED_TOP_N_CONCENTRATION: ownership_data_provider,
ContentType.OWNERSHIP_FUND_BREAKDOWN: ownership_data_provider,
ContentType.OWNERSHIP_FUND_CONCENTRATION: ownership_data_provider,
ContentType.OWNERSHIP_FUND_HOLDINGS: ownership_data_provider,
ContentType.OWNERSHIP_FUND_INVESTORS: ownership_data_provider,
ContentType.OWNERSHIP_FUND_RECENT_ACTIVITY: ownership_data_provider,
ContentType.OWNERSHIP_FUND_SHAREHOLDERS_HISTORY_REPORT: ownership_data_provider,
ContentType.OWNERSHIP_FUND_SHAREHOLDERS_REPORT: ownership_data_provider,
ContentType.OWNERSHIP_FUND_TOP_N_CONCENTRATION: ownership_data_provider,
ContentType.OWNERSHIP_INSIDER_SHAREHOLDERS_REPORT: ownership_data_provider,
ContentType.OWNERSHIP_INSIDER_TRANSACTION_REPORT: ownership_data_provider,
ContentType.OWNERSHIP_INVESTOR_HOLDINGS: ownership_data_provider,
ContentType.OWNERSHIP_ORG_INFO: ownership_data_provider,
ContentType.PRICING: pricing_data_provider,
ContentType.SURFACES: surfaces_data_provider,
ContentType.SURFACES_SWAPTION: swaption_surfaces_data_provider,
ContentType.DATES_AND_CALENDARS_ADD_PERIODS: add_period_data_provider,
ContentType.DATES_AND_CALENDARS_HOLIDAYS: holidays_data_provider,
ContentType.DATES_AND_CALENDARS_COUNT_PERIODS: count_periods_data_provider,
ContentType.DATES_AND_CALENDARS_DATE_SCHEDULE: date_schedule_data_provider,
ContentType.DATES_AND_CALENDARS_IS_WORKING_DAY: is_working_day_data_provider,
ContentType.ZC_CURVE_DEFINITIONS: curves_data_provider,
ContentType.ZC_CURVES: curves_data_provider,
DataType.CFS_BUCKETS: cfs_buckets_data_provider,
DataType.CFS_FILE_SETS: cfs_file_sets_data_provider,
DataType.CFS_FILES: cfs_files_data_provider,
DataType.CFS_PACKAGES: cfs_packages_data_provider,
DataType.CFS_STREAM: cfs_stream_data_provider,
DataType.ENDPOINT: endpoint_data_provider,
}
# Maps each API family to the configuration key under which its base URL and
# endpoint settings are stored in the RDP config tree.
# Note: CHAINS deliberately shares "apis.data.pricing" with PRICING.
api_config_key_by_api_type = {
    APIType.CFS: "apis.file-store",
    APIType.CURVES_AND_SURFACES: "apis.data.quantitative-analytics-curves-and-surfaces",
    APIType.FINANCIAL_CONTRACTS: "apis.data.quantitative-analytics-financial-contracts",
    APIType.DATES_AND_CALENDARS: "apis.data.quantitative-analytics-dates-and-calendars",
    APIType.HISTORICAL_PRICING: "apis.data.historical-pricing",
    APIType.ESG: "apis.data.environmental-social-governance",
    APIType.PRICING: "apis.data.pricing",
    APIType.OWNERSHIP: "apis.data.ownership",
    APIType.CHAINS: "apis.data.pricing",
    APIType.DATA_GRID: "apis.data.datagrid",
    APIType.NEWS: "apis.data.news",
    APIType.DISCOVERY: "apis.discovery.search",
    APIType.ESTIMATES: "apis.data.estimates",
    APIType.CUSTOM_INSTRUMENTS: "apis.data.custom-instruments",
    APIType.FILINGS: "apis.data.filings",
    APIType.DATA_STORE: "apis.data-store",
}
api_type_by_data_type = {
DataType.CFS_BUCKETS: APIType.CFS,
DataType.CFS_FILE_SETS: APIType.CFS,
DataType.CFS_FILES: APIType.CFS,
DataType.CFS_PACKAGES: APIType.CFS,
DataType.CFS_STREAM: APIType.CFS,
ContentType.BOND_CURVE: APIType.CURVES_AND_SURFACES,
ContentType.FORWARD_CURVE: APIType.CURVES_AND_SURFACES,
ContentType.CROSS_CURRENCY_CURVES_CURVES: APIType.CURVES_AND_SURFACES,
ContentType.CROSS_CURRENCY_CURVES_DEFINITIONS_CREATE: APIType.CURVES_AND_SURFACES,
ContentType.CROSS_CURRENCY_CURVES_DEFINITIONS_DELETE: APIType.CURVES_AND_SURFACES,
ContentType.CROSS_CURRENCY_CURVES_DEFINITIONS_GET: APIType.CURVES_AND_SURFACES,
ContentType.CROSS_CURRENCY_CURVES_DEFINITIONS_UPDATE: APIType.CURVES_AND_SURFACES,
ContentType.CROSS_CURRENCY_CURVES_DEFINITIONS_SEARCH: APIType.CURVES_AND_SURFACES,
ContentType.CROSS_CURRENCY_CURVES_TRIANGULATE_DEFINITIONS_SEARCH: APIType.CURVES_AND_SURFACES,
ContentType.ZC_CURVES: APIType.CURVES_AND_SURFACES,
ContentType.ZC_CURVE_DEFINITIONS: APIType.CURVES_AND_SURFACES,
ContentType.SURFACES: APIType.CURVES_AND_SURFACES,
ContentType.SURFACES_SWAPTION: APIType.CURVES_AND_SURFACES,
ContentType.CONTRACTS: APIType.FINANCIAL_CONTRACTS,
ContentType.DATES_AND_CALENDARS_ADD_PERIODS: APIType.DATES_AND_CALENDARS,
ContentType.DATES_AND_CALENDARS_HOLIDAYS: APIType.DATES_AND_CALENDARS,
ContentType.DATES_AND_CALENDARS_COUNT_PERIODS: APIType.DATES_AND_CALENDARS,
ContentType.DATES_AND_CALENDARS_DATE_SCHEDULE: APIType.DATES_AND_CALENDARS,
ContentType.DATES_AND_CALENDARS_IS_WORKING_DAY: APIType.DATES_AND_CALENDARS,
ContentType.HISTORICAL_PRICING_EVENTS: APIType.HISTORICAL_PRICING,
ContentType.HISTORICAL_PRICING_INTERDAY_SUMMARIES: APIType.HISTORICAL_PRICING,
ContentType.HISTORICAL_PRICING_INTRADAY_SUMMARIES: APIType.HISTORICAL_PRICING,
ContentType.ESG_STANDARD_SCORES: APIType.ESG,
ContentType.ESG_STANDARD_MEASURES: APIType.ESG,
ContentType.ESG_FULL_MEASURES: APIType.ESG,
ContentType.ESG_FULL_SCORES: APIType.ESG,
ContentType.ESG_BASIC_OVERVIEW: APIType.ESG,
ContentType.ESG_UNIVERSE: APIType.ESG,
ContentType.PRICING: APIType.PRICING,
ContentType.OWNERSHIP_CONSOLIDATED_BREAKDOWN: APIType.OWNERSHIP,
ContentType.OWNERSHIP_CONSOLIDATED_CONCENTRATION: APIType.OWNERSHIP,
ContentType.OWNERSHIP_CONSOLIDATED_INVESTORS: APIType.OWNERSHIP,
ContentType.OWNERSHIP_CONSOLIDATED_RECENT_ACTIVITY: APIType.OWNERSHIP,
ContentType.OWNERSHIP_CONSOLIDATED_SHAREHOLDERS_HISTORY_REPORT: APIType.OWNERSHIP,
ContentType.OWNERSHIP_CONSOLIDATED_SHAREHOLDERS_REPORT: APIType.OWNERSHIP,
ContentType.OWNERSHIP_CONSOLIDATED_TOP_N_CONCENTRATION: APIType.OWNERSHIP,
ContentType.OWNERSHIP_FUND_CONCENTRATION: APIType.OWNERSHIP,
ContentType.OWNERSHIP_FUND_BREAKDOWN: APIType.OWNERSHIP,
ContentType.OWNERSHIP_FUND_INVESTORS: APIType.OWNERSHIP,
ContentType.OWNERSHIP_FUND_RECENT_ACTIVITY: APIType.OWNERSHIP,
ContentType.OWNERSHIP_FUND_SHAREHOLDERS_HISTORY_REPORT: APIType.OWNERSHIP,
ContentType.OWNERSHIP_FUND_SHAREHOLDERS_REPORT: APIType.OWNERSHIP,
ContentType.OWNERSHIP_FUND_TOP_N_CONCENTRATION: APIType.OWNERSHIP,
ContentType.OWNERSHIP_FUND_HOLDINGS: APIType.OWNERSHIP,
ContentType.OWNERSHIP_INSIDER_SHAREHOLDERS_REPORT: APIType.OWNERSHIP,
ContentType.OWNERSHIP_INSIDER_TRANSACTION_REPORT: APIType.OWNERSHIP,
ContentType.OWNERSHIP_INVESTOR_HOLDINGS: APIType.OWNERSHIP,
ContentType.OWNERSHIP_ORG_INFO: APIType.OWNERSHIP,
ContentType.CHAINS: APIType.CHAINS,
ContentType.DATA_GRID_RDP: APIType.DATA_GRID,
ContentType.DATA_GRID_UDF: APIType.DATA_GRID,
ContentType.NEWS_HEADLINES_RDP: APIType.NEWS,
ContentType.NEWS_HEADLINES_UDF: APIType.NEWS,
ContentType.NEWS_STORY_RDP: APIType.NEWS,
ContentType.NEWS_STORY_UDF: APIType.NEWS,
ContentType.NEWS_TOP_NEWS_HIERARCHY: APIType.NEWS,
ContentType.NEWS_TOP_NEWS: APIType.NEWS,
ContentType.NEWS_ONLINE_REPORTS: APIType.NEWS,
ContentType.NEWS_ONLINE_REPORTS_HIERARCHY: APIType.NEWS,
ContentType.NEWS_IMAGES: APIType.NEWS,
ContentType.DISCOVERY_SEARCH: APIType.DISCOVERY,
ContentType.DISCOVERY_LOOKUP: APIType.DISCOVERY,
ContentType.DISCOVERY_METADATA: APIType.DISCOVERY,
ContentType.ESTIMATES_VIEW_ACTUALS_ANNUAL: APIType.ESTIMATES,
ContentType.ESTIMATES_VIEW_ACTUALS_INTERIM: APIType.ESTIMATES,
ContentType.ESTIMATES_VIEW_ACTUALS_KPI_ANNUAL: APIType.ESTIMATES,
ContentType.ESTIMATES_VIEW_ACTUALS_KPI_INTERIM: APIType.ESTIMATES,
ContentType.ESTIMATES_VIEW_SUMMARY_ANNUAL: APIType.ESTIMATES,
ContentType.ESTIMATES_VIEW_SUMMARY_HISTORICAL_SNAPSHOTS_NON_PERIODIC_MEASURES: APIType.ESTIMATES,
ContentType.ESTIMATES_VIEW_SUMMARY_HISTORICAL_SNAPSHOTS_PERIODIC_MEASURES_ANNUAL: APIType.ESTIMATES,
ContentType.ESTIMATES_VIEW_SUMMARY_HISTORICAL_SNAPSHOTS_PERIODIC_MEASURES_INTERIM: APIType.ESTIMATES,
ContentType.ESTIMATES_VIEW_SUMMARY_HISTORICAL_SNAPSHOTS_RECOMMENDATIONS: APIType.ESTIMATES,
ContentType.ESTIMATES_VIEW_SUMMARY_INTERIM: APIType.ESTIMATES,
ContentType.ESTIMATES_VIEW_SUMMARY_NON_PERIODIC_MEASURES: APIType.ESTIMATES,
ContentType.ESTIMATES_VIEW_SUMMARY_RECOMMENDATIONS: APIType.ESTIMATES,
ContentType.ESTIMATES_VIEW_SUMMARY_KPI_ANNUAL: APIType.ESTIMATES,
ContentType.ESTIMATES_VIEW_SUMMARY_KPI_HISTORICAL_SNAPSHOTS_KPI: APIType.ESTIMATES,
ContentType.ESTIMATES_VIEW_SUMMARY_KPI_INTERIM: APIType.ESTIMATES,
ContentType.CUSTOM_INSTRUMENTS_INSTRUMENTS: APIType.CUSTOM_INSTRUMENTS,
ContentType.CUSTOM_INSTRUMENTS_SEARCH: APIType.CUSTOM_INSTRUMENTS,
ContentType.CUSTOM_INSTRUMENTS_EVENTS: APIType.CUSTOM_INSTRUMENTS,
ContentType.CUSTOM_INSTRUMENTS_INTRADAY_SUMMARIES: APIType.CUSTOM_INSTRUMENTS,
ContentType.CUSTOM_INSTRUMENTS_INTERDAY_SUMMARIES: APIType.CUSTOM_INSTRUMENTS,
ContentType.FILINGS_RETRIEVAL: APIType.FILINGS,
ContentType.FILINGS_SEARCH: APIType.DATA_STORE,
}
url_config_key_by_data_type = {
DataType.CFS_BUCKETS: "endpoints.buckets",
DataType.CFS_FILE_SETS: "endpoints.file-sets",
DataType.CFS_FILES: "endpoints.files",
DataType.CFS_PACKAGES: "endpoints.packages",
DataType.CFS_STREAM: "endpoints.files",
ContentType.BOND_CURVE: "endpoints.bond-curves.curves",
ContentType.FORWARD_CURVE: "endpoints.forward-curves",
ContentType.CROSS_CURRENCY_CURVES_CURVES: "endpoints.cross-currency-curves.curves",
ContentType.CROSS_CURRENCY_CURVES_DEFINITIONS_CREATE: "endpoints.cross-currency-curves.definitions.create",
ContentType.CROSS_CURRENCY_CURVES_DEFINITIONS_DELETE: "endpoints.cross-currency-curves.definitions.delete",
ContentType.CROSS_CURRENCY_CURVES_DEFINITIONS_GET: "endpoints.cross-currency-curves.definitions.get",
ContentType.CROSS_CURRENCY_CURVES_DEFINITIONS_UPDATE: "endpoints.cross-currency-curves.definitions.update",
ContentType.CROSS_CURRENCY_CURVES_DEFINITIONS_SEARCH: "endpoints.cross-currency-curves.definitions.search",
ContentType.CROSS_CURRENCY_CURVES_TRIANGULATE_DEFINITIONS_SEARCH: "endpoints.cross-currency-curves.triangulate-definitions.search",
ContentType.ZC_CURVES: "endpoints.zc-curves",
ContentType.DATES_AND_CALENDARS_ADD_PERIODS: "endpoints.add-periods",
ContentType.DATES_AND_CALENDARS_HOLIDAYS: "endpoints.holidays",
ContentType.DATES_AND_CALENDARS_COUNT_PERIODS: "endpoints.count-periods",
ContentType.DATES_AND_CALENDARS_DATE_SCHEDULE: "endpoints.date-schedule",
ContentType.DATES_AND_CALENDARS_IS_WORKING_DAY: "endpoints.is-working-day",
ContentType.ZC_CURVE_DEFINITIONS: "endpoints.zc-curve-definitions",
ContentType.SURFACES: "endpoints.surfaces",
ContentType.SURFACES_SWAPTION: "endpoints.surfaces",
ContentType.CONTRACTS: "endpoints.financial-contracts",
ContentType.HISTORICAL_PRICING_EVENTS: "endpoints.events",
ContentType.HISTORICAL_PRICING_INTERDAY_SUMMARIES: "endpoints.interday-summaries",
ContentType.HISTORICAL_PRICING_INTRADAY_SUMMARIES: "endpoints.intraday-summaries",
ContentType.ESG_STANDARD_SCORES: "endpoints.scores-standard",
ContentType.ESG_STANDARD_MEASURES: "endpoints.measures-standard",
ContentType.ESG_FULL_MEASURES: "endpoints.measures-full",
ContentType.ESG_FULL_SCORES: "endpoints.scores-full",
ContentType.ESG_BASIC_OVERVIEW: "endpoints.basic",
ContentType.ESG_UNIVERSE: "endpoints.universe",
ContentType.PRICING: "endpoints.snapshots",
ContentType.OWNERSHIP_CONSOLIDATED_BREAKDOWN: "endpoints.consolidated.breakdown",
ContentType.OWNERSHIP_CONSOLIDATED_CONCENTRATION: "endpoints.consolidated.concentration",
ContentType.OWNERSHIP_CONSOLIDATED_INVESTORS: "endpoints.consolidated.investors",
ContentType.OWNERSHIP_CONSOLIDATED_RECENT_ACTIVITY: "endpoints.consolidated.recent-activity",
ContentType.OWNERSHIP_CONSOLIDATED_SHAREHOLDERS_HISTORY_REPORT: "endpoints.consolidated.shareholders-history-report",
ContentType.OWNERSHIP_CONSOLIDATED_SHAREHOLDERS_REPORT: "endpoints.consolidated.shareholders-report",
ContentType.OWNERSHIP_CONSOLIDATED_TOP_N_CONCENTRATION: "endpoints.consolidated.top-n-concentration",
ContentType.OWNERSHIP_FUND_CONCENTRATION: "endpoints.fund.concentration",
ContentType.OWNERSHIP_FUND_BREAKDOWN: "endpoints.fund.breakdown",
ContentType.OWNERSHIP_FUND_INVESTORS: "endpoints.fund.investors",
ContentType.OWNERSHIP_FUND_RECENT_ACTIVITY: "endpoints.fund.recent-activity",
ContentType.OWNERSHIP_FUND_SHAREHOLDERS_HISTORY_REPORT: "endpoints.fund.shareholders-history-report",
ContentType.OWNERSHIP_FUND_SHAREHOLDERS_REPORT: "endpoints.fund.shareholders-report",
ContentType.OWNERSHIP_FUND_TOP_N_CONCENTRATION: "endpoints.fund.top-n-concentration",
ContentType.OWNERSHIP_FUND_HOLDINGS: "endpoints.fund.holdings",
ContentType.OWNERSHIP_INSIDER_SHAREHOLDERS_REPORT: "endpoints.insider.shareholders-report",
ContentType.OWNERSHIP_INSIDER_TRANSACTION_REPORT: "endpoints.insider.transaction-report",
ContentType.OWNERSHIP_INVESTOR_HOLDINGS: "endpoints.investor.holdings",
ContentType.OWNERSHIP_ORG_INFO: "endpoints.org-info",
ContentType.CHAINS: "endpoints.chains",
ContentType.DATA_GRID_RDP: "endpoints.standard",
ContentType.DATA_GRID_UDF: "endpoints.standard",
ContentType.NEWS_HEADLINES_RDP: "endpoints.headlines",
ContentType.NEWS_HEADLINES_UDF: "endpoints.headlines",
ContentType.NEWS_STORY_RDP: "endpoints.stories",
ContentType.NEWS_STORY_UDF: "endpoints.stories",
ContentType.NEWS_TOP_NEWS_HIERARCHY: "endpoints.top-news",
ContentType.NEWS_TOP_NEWS: "endpoints.top-news",
ContentType.NEWS_IMAGES: "endpoints.images",
ContentType.NEWS_ONLINE_REPORTS: "endpoints.online-reports",
ContentType.NEWS_ONLINE_REPORTS_HIERARCHY: "endpoints.online-reports",
ContentType.DISCOVERY_SEARCH: "endpoints.search",
ContentType.DISCOVERY_LOOKUP: "endpoints.lookup",
ContentType.DISCOVERY_METADATA: "endpoints.metadata",
ContentType.ESTIMATES_VIEW_ACTUALS_ANNUAL: "endpoints.view-actuals.annual",
ContentType.ESTIMATES_VIEW_ACTUALS_INTERIM: "endpoints.view-actuals.interim",
ContentType.ESTIMATES_VIEW_ACTUALS_KPI_ANNUAL: "endpoints.view-actuals-kpi.annual",
ContentType.ESTIMATES_VIEW_ACTUALS_KPI_INTERIM: "endpoints.view-actuals-kpi.interim",
ContentType.ESTIMATES_VIEW_SUMMARY_ANNUAL: "endpoints.view-summary.annual",
ContentType.ESTIMATES_VIEW_SUMMARY_HISTORICAL_SNAPSHOTS_NON_PERIODIC_MEASURES: "endpoints.view-summary.historical-snapshots-non-periodic-measures",
ContentType.ESTIMATES_VIEW_SUMMARY_HISTORICAL_SNAPSHOTS_PERIODIC_MEASURES_ANNUAL: "endpoints.view-summary.historical-snapshots-periodic-measures-annual",
ContentType.ESTIMATES_VIEW_SUMMARY_HISTORICAL_SNAPSHOTS_PERIODIC_MEASURES_INTERIM: "endpoints.view-summary.historical-snapshots-periodic-measures-interim",
ContentType.ESTIMATES_VIEW_SUMMARY_HISTORICAL_SNAPSHOTS_RECOMMENDATIONS: "endpoints.view-summary.historical-snapshots-recommendations",
ContentType.ESTIMATES_VIEW_SUMMARY_INTERIM: "endpoints.view-summary.interim",
ContentType.ESTIMATES_VIEW_SUMMARY_NON_PERIODIC_MEASURES: "endpoints.view-summary.non-periodic-measures",
ContentType.ESTIMATES_VIEW_SUMMARY_RECOMMENDATIONS: "endpoints.view-summary.recommendations",
ContentType.ESTIMATES_VIEW_SUMMARY_KPI_ANNUAL: "endpoints.view-summary-kpi.annual",
ContentType.ESTIMATES_VIEW_SUMMARY_KPI_HISTORICAL_SNAPSHOTS_KPI: "endpoints.view-summary-kpi.historical-snapshots-kpi",
ContentType.ESTIMATES_VIEW_SUMMARY_KPI_INTERIM: "endpoints.view-summary-kpi.interim",
ContentType.CUSTOM_INSTRUMENTS_INSTRUMENTS: "endpoints.instruments",
ContentType.CUSTOM_INSTRUMENTS_SEARCH: "endpoints.search",
ContentType.CUSTOM_INSTRUMENTS_EVENTS: "endpoints.events",
ContentType.CUSTOM_INSTRUMENTS_INTERDAY_SUMMARIES: "endpoints.interday-summaries",
ContentType.CUSTOM_INSTRUMENTS_INTRADAY_SUMMARIES: "endpoints.intraday-summaries",
ContentType.FILINGS_RETRIEVAL: "endpoints.retrieval",
ContentType.FILINGS_SEARCH: "endpoints.graphql",
}
def _get_api_config_key(data_type: DataType) -> str:
    """Resolve the config key of the API family that serves `data_type` (or None)."""
    return api_config_key_by_api_type.get(api_type_by_data_type.get(data_type))
def _get_url_config_key(data_type: DataType) -> str:
    """Resolve the endpoint-path config key registered for `data_type` (or None)."""
    return url_config_key_by_data_type.get(data_type)
def get_api_config(data_type: Union[DataType, ContentType], config: "_RDPConfig") -> Union[dict, Any]:
    """
    Return the API configuration section for `data_type`.

    Raises
    ------
    AttributeError
        When no configuration section exists for the resolved key.
    """
    key = _get_api_config_key(data_type)
    section = config.get(key)
    if section is None:
        raise AttributeError(f"Cannot find api_key, data_type={data_type} by key={key}")
    return section
def get_base_url(data_type: DataType, config: "_RDPConfig") -> str:
    """
    Return the base URL configured for the API family serving `data_type`.

    Parameters
    ----------
    config: _RDPConfig
    data_type: DataType

    Returns
    -------
    string
    """
    return get_api_config(data_type, config).get("url")
def get_url(
    data_type: Union[DataType, ContentType],
    config: "_RDPConfig",
    request_mode: str = None,
) -> str:
    """
    Build the full endpoint URL for `data_type`.

    Parameters
    ----------
    config: _RDPConfig
    data_type: DataType
    request_mode: str in ["sync", "async"]
        Defaults to "sync" when omitted.

    Returns
    -------
    string

    Raises
    ------
    AttributeError
        When the request mode is invalid or no endpoint is configured.
    """
    api_config = get_api_config(data_type, config)
    url_config_key = _get_url_config_key(data_type)

    if request_mode is None:
        request_mode = "sync"

    if request_mode not in ("sync", "async"):
        # BUGFIX: the previous f-string had no placeholder, so the error
        # never reported the offending value.
        raise AttributeError(f"Request mode {request_mode!r} not in ['sync', 'async'].")

    # An endpoint entry may be a mapping with per-mode ("sync"/"async") URLs...
    content_url = api_config.get(".".join([url_config_key, request_mode]))
    if content_url is None:
        # ...or a single mode-agnostic endpoint.
        content_url = api_config.get(url_config_key)

    if content_url is None:
        raise AttributeError(f"Cannot find content_url, data_type={data_type} by key={url_config_key}")

    base_url = api_config.get("url")
    return urljoin(base_url, content_url)
def make_provider(data_type: Union[DataType, ContentType], **_) -> "DataProvider":
    """
    Return the data provider registered for `data_type`.

    Parameters
    ----------
    data_type: DataType

    Returns
    -------
    DataProvider

    Raises
    ------
    AttributeError
        When no provider is registered for the given type.

    (Also removes stray non-Python text that had been appended to the last
    line of this function, which broke the module.)
    """
    data_provider = data_provider_by_data_type.get(data_type)
    if data_provider is None:
        raise AttributeError(f"Cannot get data provider by content type: {data_type}")
    return data_provider
from typing import List, Optional, TYPE_CHECKING, Union, Set
from ._data_provider import DataProviderLayer, Response
from ._data_type import DataType
from ..._core.session import get_valid_session
from ..._tools import validate_endpoint_request_url_parameters
from ...usage_collection._filter_types import FilterType
from ...usage_collection._logger import get_usage_logger
from ...usage_collection._utils import ModuleName
if TYPE_CHECKING:
from ..._core.session._session import Session
from ._endpoint_data import RequestMethod
from ._data_provider import Response
class Definition(DataProviderLayer):
"""
Defines the wrapper around the data delivery mechanism of the Refinitiv Data Platform.
Parameters
----------
url : str
API endpoint URL.
method : RequestMethod, optional
HTTP request method.
path_parameters : dict, optional
Parameters that can be added to the endpoint URL.
query_parameters : dict, optional
HTTP request query parameters.
header_parameters : dict, optional
HTTP request header parameters.
body_parameters : dict, optional
HTTP request body parameters.
Examples
--------
>>> from refinitiv.data.delivery import endpoint_request
>>> definition_endpoint = endpoint_request.Definition("/data/news/v1/analyze")
"""
# Should not change even if class name is changed
_USAGE_CLS_NAME = "EndpointDefinition"
def __init__(
    self,
    url: str,
    method: Union["RequestMethod", str, None] = None,
    path_parameters: Optional[dict] = None,
    query_parameters: Optional[dict] = None,
    header_parameters: Optional[dict] = None,
    body_parameters: Union[dict, List[dict], None] = None,
):
    """Store the request attributes and initialize the provider layer."""
    self.url = url
    self.method = method
    self.path_parameters = path_parameters
    self.query_parameters = query_parameters
    self.body_parameters = body_parameters
    self.header_parameters = header_parameters
    # Forward the same values to the delivery layer that performs the request.
    super().__init__(
        data_type=DataType.ENDPOINT,
        url=url,
        method=method,
        path_parameters=path_parameters,
        query_parameters=query_parameters,
        body_parameters=body_parameters,
        header_parameters=header_parameters,
    )
def get_data(self, session: Optional["Session"] = None) -> "Response":
"""
Send a request to the Refinitiv Data Platform API directly.
Parameters
----------
session : Session, optional
Session object. If it's not passed the default session will be used.
Returns
-------
Response
Examples
--------
>>> from refinitiv.data.delivery import endpoint_request
>>> definition_endpoint = endpoint_request.Definition("/data/news/v1/analyze")
>>> definition_endpoint.get_data()
"""
validate_endpoint_request_url_parameters(self.url, self.path_parameters)
session = get_valid_session(session)
self._log_usage(
f"{self.__class__.__module__}.{self.__class__.__qualname__}.get_data",
{FilterType.SYNC, FilterType.LAYER_DELIVERY, FilterType.REST},
)
response = self._provider.get_data(
session,
self.url,
method=self.method,
path_parameters=self.path_parameters,
query_parameters=self.query_parameters,
header_parameters=self.header_parameters,
body_parameters=self.body_parameters,
)
return response
async def get_data_async(self, session: Optional["Session"] = None) -> "Response":
"""
Sends an asynchronous request directly to the Refinitiv Data Platform API.
Parameters
----------
session : Session, optional
Session object. If it's not passed the default session will be used.
Returns
-------
Response
Examples
--------
>>> from refinitiv.data.delivery import endpoint_request
>>> definition_endpoint = endpoint_request.Definition("/data/news/v1/analyze")
>>> await definition_endpoint.get_data_async()
"""
validate_endpoint_request_url_parameters(self.url, self.path_parameters)
session = get_valid_session(session)
self._log_usage(
f"{self.__class__.__module__}.{self.__class__.__qualname__}.get_data_async",
{FilterType.ASYNC, FilterType.LAYER_DELIVERY, FilterType.REST},
)
response = await self._provider.get_data_async(
session,
self.url,
method=self.method,
path_parameters=self.path_parameters,
query_parameters=self.query_parameters,
header_parameters=self.header_parameters,
body_parameters=self.body_parameters,
)
return response
def _log_usage(self, name: str, filter_type: Set[FilterType]):
get_usage_logger().log_func(
name=f"{ModuleName.DELIVERY}.{self._USAGE_CLS_NAME}.{name}",
func_path=f"{self.__class__.__module__}.{self.__class__.__qualname__}.{name}",
kwargs=dict(
url=self.url,
method=self.method,
path_parameters=self.path_parameters,
query_parameters=self.query_parameters,
header_parameters=self.header_parameters,
body_parameters=self.body_parameters,
),
desc=filter_type,
) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/delivery/_data/_endpoint_definition.py | 0.892986 | 0.20001 | _endpoint_definition.py | pypi |
from typing import TYPE_CHECKING, Union, List
if TYPE_CHECKING:
import httpx
class ParsedData:
    """Normalized view of a parsed HTTP response.

    Scalar error codes/messages are stored as single-element lists so the
    accessors can always operate on lists; ``None`` becomes an empty list.
    """

    def __init__(
        self,
        status: dict,
        raw_response: "httpx.Response",
        content_data: Union[dict, list, str] = None,
        error_codes: Union[str, int, List[int]] = None,
        error_messages: Union[str, List[str]] = None,
    ) -> None:
        self.status = status
        self.raw_response = raw_response
        self.content_data = content_data
        self._error_codes = self._as_list(error_codes, (int, str))
        self._error_messages = self._as_list(error_messages, str)

    @staticmethod
    def _as_list(value, scalar_types) -> list:
        """Wrap scalars into a one-element list; map falsy values to []."""
        if isinstance(value, scalar_types):
            return [value]
        return value or []

    @property
    def error_codes(self) -> List[int]:
        """All collected error codes (possibly empty)."""
        return self._error_codes

    @error_codes.setter
    def error_codes(self, value):
        self._error_codes = value if isinstance(value, list) else [value]

    @property
    def first_error_code(self) -> int:
        """First error code, or 0 when there are none."""
        return self._error_codes[0] if self._error_codes else 0

    @property
    def error_messages(self) -> List[str]:
        """All collected error messages (possibly empty)."""
        return self._error_messages

    @error_messages.setter
    def error_messages(self, value):
        self._error_messages = value if isinstance(value, list) else [value]

    @property
    def first_error_message(self) -> str:
        """First error message, or the empty string when there are none."""
        return self._error_messages[0] if self._error_messages else ""

    def as_dict(self) -> dict:
        """Return the plain-dict representation of this object."""
        return {
            "status": self.status,
            "raw_response": self.raw_response,
            "content_data": self.content_data,
            "error_codes": self.error_codes,
            "error_messages": self.error_messages,
        }

    def __eq__(self, o: object) -> bool:
        # Allow direct comparison against a plain dict representation.
        return self.as_dict() == o if isinstance(o, dict) else super().__eq__(o)

    def __repr__(self) -> str:
        return str(self.as_dict())
from ._data_types import BucketData, FileData, FileSetData, PackageData
from .._data._data_provider_layer import get_data_by_data_type
from .._data._data_type import DataType
from ..._tools._utils import camel_to_snake
class BaseCFSObject:
    """Base wrapper around one raw CFS item (bucket, file-set, file or package).

    Payload keys are snake_cased and exposed both as instance attributes and
    via ``obj[key]``.  Iterating an instance lazily fetches its child objects
    (e.g. a bucket yields its file-sets).
    """

    def __init__(self, data, provider, session=None):
        self._session = session
        # Keys arrive camelCased from the platform; store them snake_cased.
        self._data = {camel_to_snake(key): value for key, value in data.items()}
        self._child_objects = None
        self._provider = provider
        # Mirror every payload item as an instance attribute for convenience.
        for name, value in self._data.items():
            setattr(self, name, value)

    def __getitem__(self, item):
        # Dictionary-style access to the snake_cased payload.
        return self._data[item]

    def __iter__(self):
        # Subclasses with no children set _child_type = None (see CFSFile).
        if self._child_type is None:
            raise TypeError(f"object {self._provider.name, type(self)} is not iterable")
        args = self._params
        # _params is a (query-parameter name, payload key) pair.
        kwargs = {args[0]: self._data[args[1]]}
        self._child_objects = get_data_by_data_type(data_type=self._child_type, session=self._session, **kwargs)
        self._n = 0
        return self

    def __next__(self):
        _iter_obj = self._child_objects.data._iter_object
        if not _iter_obj:
            raise StopIteration
        if self._n < len(_iter_obj):
            result = self._child_objects.data._iter_object[self._n]
            self._n += 1
            return result
        raise StopIteration

    def __repr__(self):
        return f"{self.__class__.__name__}({self._data})"

    @property
    def _params(self):
        # (query-parameter name, payload key) pair; must be set by subclasses.
        raise NotImplementedError

    @property
    def _child_type(self):
        # DataType of the child collection; must be set by subclasses.
        raise NotImplementedError
class CFSFile(BaseCFSObject):
    """A single CFS file; a leaf object — iterating it raises TypeError."""

    _child_type = None
    _params = None
class CFSBucket(BaseCFSObject):
    """A CFS bucket; iterating it yields file-sets queried by ``bucket=<name>``."""

    _child_type = DataType.CFS_FILE_SETS
    _params = ("bucket", "name")
class CFSFileSet(BaseCFSObject):
    """A CFS file-set; iterating it yields files queried by ``fileset_id=<id>``."""

    _child_type = DataType.CFS_FILES
    _params = ("fileset_id", "id")
class CFSPackage(BaseCFSObject):
    """A CFS package; iterating it yields file-sets across the package's buckets."""

    _child_type = DataType.CFS_FILE_SETS
    _params = ("package_id", "package_id")

    def __init__(self, data, provider, session=None):
        super().__init__(data, provider, session=session)
        # Remaining buckets to fall back to once the current result is exhausted.
        self._bucket_names = self._data.get("bucket_names", [])

    def __iter__(self):
        args = self._params
        kwargs = {args[0]: self._data[args[1]]}
        # NOTE(review): when there are no buckets the query is sent with
        # ``bucket=[]`` (an empty list, not None/"") — confirm intended.
        bucket_name = self._bucket_names[0] if self._bucket_names else []
        self._child_objects = get_data_by_data_type(
            data_type=self._child_type,
            session=self._session,
            bucket=bucket_name,
            **kwargs,
        )
        self._n = 0
        return self

    def __next__(self):
        _iter_obj = self._child_objects.data._iter_object
        if not _iter_obj:
            raise StopIteration
        if self._n < len(_iter_obj):
            result = self._child_objects.data._iter_object[self._n]
            self._n += 1
            return result
        # Current result exhausted: fall back to the remaining buckets.
        # NOTE(review): __iter__ does not pop the first bucket, so the first
        # ``pop(0)`` below re-queries the bucket already served — confirm.
        while self._bucket_names:
            bucket = self._bucket_names.pop(0)
            args = self._params
            kwargs = {args[0]: self._data[args[1]], "bucket": bucket}
            self._child_objects = get_data_by_data_type(self._child_type, self._session, **kwargs)
            # Skip buckets whose query errored or returned no values.
            if not self._child_objects.errors and self._child_objects.data.raw["value"]:
                self._n = 0
                result = self._child_objects.data._iter_object[self._n]
                self._n += 1
                return result
        raise StopIteration
# Maps a raw data-class (the ``provider`` passed to IterObj) to the CFS
# wrapper class used to expose its items.
class_by_type = {
    BucketData: CFSBucket,
    PackageData: CFSPackage,
    FileSetData: CFSFileSet,
    FileData: CFSFile,
}
class IterObj:
    """Indexable, iterable collection of CFS wrapper objects built from raw items."""

    def __init__(self, value, session=None, provider=None):
        wrapper_cls = class_by_type.get(provider)
        self._values = [wrapper_cls(item, provider=provider, session=session) for item in value]

    def __getitem__(self, index):
        return self._values[index]

    def __iter__(self):
        self._n = 0
        return self

    def __next__(self):
        if self._n >= len(self._values):
            raise StopIteration
        current = self._values[self._n]
        self._n += 1
        return current

    def __repr__(self):
        # One wrapped object per line.
        return "\n".join(repr(item) for item in self._values)

    def __len__(self):
        return len(self._values)
from datetime import timedelta, datetime
from typing import Union, Optional
from ._tools import _convert_date_time
from .._data._data_provider_layer import DataProviderLayer
from .._data._data_type import DataType
from ..._tools import validate_types
class Definition(DataProviderLayer):
"""
Describes the files inside a particular file-set with all their attributes.
Parameters
__________
fileset_id : str
File set ID for searching.
file_name : str, optional
File name for partial match searching.
created_since : str or timedelta or datetime, optional
File creation date.
modified_since : str or timedelta or datetime, optional
File last modification date.
skip_token : str, optional
Skip token is only used if a previous operation returned a partial result. If a previous response
contains a nextLink element, the value of the nextLink element will include a skip token parameter that
specifies a starting point to use for subsequent calls.
page_size : int, optional
Number of buckets returned.
Methods
-------
get_data(session=session)
Returns a response to the data platform
get_data_async(session=None)
Returns a response asynchronously to the data platform
Examples
--------
>>> from refinitiv.data.delivery import cfs
>>> definition = cfs.files.Definition()
>>> files = definition.get_data()
Using get_data_async
>>> import asyncio
>>> task = definition.get_data_async()
>>> response = asyncio.run(task)
"""
def __init__(
self,
fileset_id: str,
file_name: Optional[str] = None,
created_since: Union[str, datetime, timedelta] = None,
modified_since: Union[str, datetime, timedelta] = None,
skip_token: Optional[str] = None,
page_size: int = 25,
):
validate_types(page_size, [int, type(None)], "page_size")
created_since = _convert_date_time(created_since)
modified_since = _convert_date_time(modified_since)
super().__init__(
data_type=DataType.CFS_FILES,
fileset_id=fileset_id,
file_name=file_name,
created_since=created_since,
modified_since=modified_since,
skip_token=skip_token,
page_size=page_size,
) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/delivery/cfs/_files_definition.py | 0.921653 | 0.309571 | _files_definition.py | pypi |
from typing import TYPE_CHECKING, Optional
from ._tools import _convert_date_time
from .._data._data_provider_layer import DataProviderLayer
from .._data._data_type import DataType
from ..._tools import validate_types, attributes_arg_parser, try_copy_to_list
if TYPE_CHECKING:
from ..._types import OptStrStrs
class Definition(DataProviderLayer):
"""
Describes the parameters to retrieve the buckets from a client file store with all their attributes.
Parameters
__________
name : str, optional
Bucket name for partial match searching.
created_since : str, optional
Bucket creation date.
modified_since : str, optional
Bucket modification date.
available_from : str, optional
Bucket availability start date.
available_to : str, optional
Bucket availability end date.
attributes : list of str, optional
Publisher-defined bucket attributes.
page_size : int, optional
Number of buckets returned.
skip_token : str, optional
Skip token is only used if a previous operation returned a partial result. If a previous response
contains a nextLink element, the value of the nextLink element will include a skip token parameter that
specifies a starting point to use for subsequent calls.
Methods
-------
get_data(session=session)
Returns a response to the data platform
get_data_async(session=None)
Returns a response asynchronously to the data platform
Examples
--------
>>> from refinitiv.data.delivery import cfs
>>> definition = cfs.buckets.Definition()
>>> buckets = definition.get_data()
Using get_data_async
>>> import asyncio
>>> task = definition.get_data_async()
>>> response = asyncio.run(task)
"""
def __init__(
self,
name: Optional[str] = None,
created_since: Optional[str] = None,
modified_since: Optional[str] = None,
available_from: Optional[str] = None,
available_to: Optional[str] = None,
attributes: "OptStrStrs" = None,
page_size: int = 25,
skip_token: Optional[str] = None,
):
validate_types(page_size, [int], "page_size")
created_since = _convert_date_time(created_since)
modified_since = _convert_date_time(modified_since)
available_from = _convert_date_time(available_from)
available_to = _convert_date_time(available_to)
if attributes:
attributes = try_copy_to_list(attributes)
attributes = attributes_arg_parser.get_list(attributes)
attributes = ";".join(attributes)
super().__init__(
data_type=DataType.CFS_BUCKETS,
name=name,
created_since=created_since,
modified_since=modified_since,
available_from=available_from,
available_to=available_to,
attributes=attributes,
page_size=page_size,
skip_token=skip_token,
) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/delivery/cfs/_buckets_definition.py | 0.920007 | 0.329257 | _buckets_definition.py | pypi |
from typing import Optional
from .._data._data_provider_layer import DataProviderLayer
from .._data._data_type import DataType
from ..._tools import validate_types
class Definition(DataProviderLayer):
"""
Describes the particular file packages inside the bucket.
Parameters
__________
package_name : str, optional
Package name for partial match searching.
package_id: str, optional
Package ID.
package_type : str, optional
Package type.
bucket_name : str, optional
Package bucket name.
page : int, optional
The offset number that determines how many pages should be returned.
included_total_result : bool, optional
The flag to indicate if total record count should be returned or not.
skip_token : str, optional
Skip token is only used if a previous operation returned a partial result. If a previous response
contains a nextLink element, the value of the nextLink element will include a skip token parameter that
specifies a starting point to use for subsequent calls.
page_size : int, optional
The number of package that will be returned into a single response.
included_entitlement_result : bool, optional
The flag that enables the entitlement checking on each package.
Methods
-------
get_data(session=session)
Returns a response to the data platform
get_data_async(session=None)
Returns a response asynchronously to the data platform
Examples
--------
>>> from refinitiv.data.delivery import cfs
>>> definition = cfs.packages.Definition()
>>> packages = definition.get_data()
Using get_data_async
>>> import asyncio
>>> task = definition.get_data_async()
>>> response = asyncio.run(task)
"""
def __init__(
self,
package_name: Optional[str] = None,
package_id: Optional[str] = None,
package_type: Optional[str] = None,
bucket_name: Optional[str] = None,
page: Optional[int] = None,
included_total_result: bool = False,
skip_token: Optional[str] = None,
page_size: int = 25,
included_entitlement_result: bool = False,
):
validate_types(page_size, [int, type(None)], "page_size")
super().__init__(
data_type=DataType.CFS_PACKAGES,
package_name=package_name,
_package_id=package_id,
package_type=package_type,
bucket_name=bucket_name,
page=page,
included_total_result=included_total_result,
skip_token=skip_token,
page_size=page_size,
included_entitlement_result=included_entitlement_result,
) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/delivery/cfs/_packages_definition.py | 0.936227 | 0.444203 | _packages_definition.py | pypi |
from typing import Optional
from ._tools import _convert_date_time
from .._data._data_provider_layer import DataProviderLayer
from .._data._data_type import DataType
from ..._tools import validate_types
class Definition(DataProviderLayer):
"""
Describes the indivisible set of files inside a particular bucket with all their attributes.
Parameters
__________
bucket : str
The name of the bucket to retrieve file sets.
name : str, optional
Name of the file set.
attributes : dict, optional
List of publisher-defined key-value attributes. Each key-pair value is split by a colon. (e.g.
attributes=key1:val1,key2:val2).
package_id : str, optional
File set package ID.
status : str, optional
Filter file-set by status (Ready/Pending).
available_from : str, optional
File set availability start date.
available_to : str, optional
File set availability end date.
content_from : str, optional
Age of the content within the file, start date.
content_to : str, optional
Age of the content within the file, end date.
created_since : str, optional
File set creation date.
modified_since : str, optional
File set modification date.
skip_token : str, optional
Skip token is only used if a previous operation returned a partial result. If a previous response
contains a nextLink element, the value of the nextLink element will include a skip token parameter that
specifies a starting point to use for subsequent calls.
page_size : int, optional
Returned filesets number.
Methods
-------
get_data(session=session)
Returns a response to the data platform
get_data_async(session=None)
Returns a response asynchronously to the data platform
Examples
--------
>>> from refinitiv.data.delivery import cfs
>>> definition = cfs.file_sets.Definition()
>>> file_sets = definition.get_data()
Using get_data_async
>>> import asyncio
>>> task = definition.get_data_async()
>>> response = asyncio.run(task)
"""
def __init__(
self,
bucket: str,
name: Optional[str] = None,
attributes: Optional[dict] = None,
package_id: Optional[str] = None,
status: Optional[str] = None,
available_from: Optional[str] = None,
available_to: Optional[str] = None,
content_from: Optional[str] = None,
content_to: Optional[str] = None,
created_since: Optional[str] = None,
modified_since: Optional[str] = None,
skip_token: Optional[str] = None,
page_size: int = 25,
):
validate_types(page_size, [int], "page_size")
created_since = _convert_date_time(created_since)
modified_since = _convert_date_time(modified_since)
available_from = _convert_date_time(available_from)
available_to = _convert_date_time(available_to)
content_from = _convert_date_time(content_from)
content_to = _convert_date_time(content_to)
if attributes is not None:
attributes = ",".join([f"{key}:{value}" for key, value in attributes.items()])
super().__init__(
data_type=DataType.CFS_FILE_SETS,
bucket=bucket,
name=name,
attributes=attributes,
package_id=package_id,
status=status,
available_from=available_from,
available_to=available_to,
content_from=content_from,
content_to=content_to,
created_since=created_since,
modified_since=modified_since,
skip_token=skip_token,
page_size=page_size,
) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/delivery/cfs/_file_sets_definition.py | 0.935117 | 0.411525 | _file_sets_definition.py | pypi |
import asyncio
import threading
from typing import List, Optional, TYPE_CHECKING
import requests
from ._session_type import SessionType
from ..._open_state import OpenState
if TYPE_CHECKING:
from . import Session
# Shortcut to the HTTP status-code registry provided by ``requests``.
codes = requests.codes
# Statuses treated as authentication/authorization failures.
UNAUTHORIZED_CODES = {codes.bad, codes.unauthorized, codes.forbidden}
def is_desktop_session(session: "Session") -> bool:
    """Return True when *session* is a desktop session."""
    return SessionType.DESKTOP == session.type
def is_platform_session(session: "Session") -> bool:
    """Return True when *session* is a platform session."""
    return SessionType.PLATFORM == session.type
def is_open(session: "Session") -> bool:
    """Return True when the session is currently opened."""
    return OpenState.Opened is session.open_state
def is_closed(session: "Session") -> bool:
    """Return True when the session is currently closed."""
    return OpenState.Closed is session.open_state
def raise_if_closed(session: "Session"):
    """Log and raise ValueError when the session is closed; no-op otherwise."""
    if is_closed(session):
        message = "Session is not opened. Can't send any request"
        session.error(message)
        raise ValueError(message)
def handle_exception(task):
    """Re-raise the exception stored on an asyncio task; cancellation is ignored."""
    try:
        exception = task.exception()
    except asyncio.CancelledError:
        return
    if exception:
        raise exception
class NullResponse:
    """Stand-in object used when no real HTTP response is available."""

    # Mimics the minimal surface of a response: empty body, zero status.
    status_code = 0
    text = ""

    def json(self):
        """Return an empty JSON payload."""
        return {}
class Delays:
    """Sequence of backoff delays that repeats its final value once exhausted."""

    def __init__(self, delays: List[int]) -> None:
        self._delays = delays
        self._index = 0

    def next(self) -> int:
        """Return the next delay; after the last one, keep returning it."""
        position = min(self._index, len(self._delays) - 1)
        self._index = position + 1
        return self._delays[position]

    def reset(self):
        """Start again from the first delay."""
        self._index = 0

    def __len__(self):
        return len(self._delays)
# Time constants (in seconds) used to build retry/backoff schedules.
SECONDS_5 = 5
SECONDS_10 = 10
SECONDS_15 = 15
MINUTE_1 = 60
MINUTES_5 = 5 * MINUTE_1
MINUTES_10 = 10 * MINUTE_1
MINUTES_15 = 15 * MINUTE_1
HOUR_1 = 60 * MINUTE_1
HOURS_2 = 2 * HOUR_1
def get_delays() -> Delays:
    """Build the default backoff schedule: 5s, 10s, 15s, then 60s repeated."""
    return Delays([SECONDS_5, SECONDS_10, SECONDS_15, MINUTE_1])
class Daemon(threading.Thread):
    """Daemon thread that wakes up every *interval* seconds until cancelled.

    Parameters
    ----------
    interval : float
        Number of seconds to wait between wake-ups.
    name : str, optional
        Thread name.
    """

    def __init__(self, interval, name: Optional[str] = None) -> None:
        # Bug fix: the previous code called ``threading.Thread.__init__(self,
        # name, daemon=True)``, passing ``name`` positionally as the ``group``
        # argument (which must be None) — any non-None name raised an
        # AssertionError. ``name`` must be passed as a keyword.
        super().__init__(name=name, daemon=True)
        self.finished = threading.Event()
        self.interval = interval

    def cancel(self):
        """Ask the thread to stop; takes effect at the next wake-up at latest."""
        self.finished.set()

    def run(self):
        # Event.wait returns early as soon as ``cancel`` sets the flag.
        while not self.finished.is_set():
            self.finished.wait(self.interval)
class Sensitive(str):
    """String subclass whose repr is masked so secrets never leak into logs."""

    def __repr__(self):
        # Value itself is still usable as a normal string; only repr is hidden.
        return "********"
from typing import Optional
from ._session_type import SessionType
from ... import _configure as configure
def _retrieve_config_and_set_type(session_name, session_type):
    """Load the config section for *session_name* according to *session_type*.

    Raises TypeError for an unknown session type, and ValueError when no
    config section exists under the resolved key.
    """
    if session_type == SessionType.PLATFORM:
        config_key = configure.keys.platform_session(session_name)
    elif session_type == SessionType.DESKTOP:
        config_key = configure.keys.desktop_session(session_name)
    else:
        raise TypeError(f"Invalid session type: {session_type}, please set 'desktop' or 'platform'.")
    session_config = configure.get(config_key, {})
    if not session_config:
        raise ValueError(f"Can't get config by name: {session_name}. Please check config name")
    return session_config
def _get_session_type_and_name(config_path: str):
    """Split ``"session_type.session_name"`` (falling back to the configured
    default path) and return ``(session_name, session_type)``."""
    from ._session_provider import get_session_type

    try:
        resolved_path = config_path or configure.get("sessions.default")
        type_part, name_part = resolved_path.split(".")
    except ValueError:
        # Wrong number of dot-separated parts.
        raise ValueError(
            "Please check your 'session_name'. It should be in the following way: 'session_type.session_name'"
        )
    except AttributeError:
        # ``resolved_path`` was not a string.
        raise AttributeError("Invalid type, please provide string")
    return name_part, get_session_type(type_part)
class Definition(object):
    """
    Defines the sessions (either desktop sessions or platform sessions).

    Parameters
    ----------
    name: str, optional
        Name of the session to create. This name must refer to a session and
        the related parameters defined in the configuration file of the
        library, e.g. ``platform.my-session``.

    Raises
    ----------
    Exception
        1. If user provided invalid session type in session name.
           Type should be 'platform' or 'desktop'
        2. If app-key not found in config and arguments.

    Examples
    --------
    >>> import refinitiv.data as rd
    >>> platform_session = rd.session.Definition(name="platform.my-session").get_session()
    """

    def __init__(self, name: Optional[str] = None) -> None:
        from ._session_provider import _make_session_provider_by_arguments

        # Resolve the session factory eagerly so configuration problems
        # surface at definition time rather than at get_session().
        self._create_session = _make_session_provider_by_arguments(name)

    def get_session(self):
        """
        Creates and returns the session.

        Returns
        -------
        The instance of the defined session.
        """
        return self._create_session()
import string
from enum import Enum
from typing import Any, Dict, List, Set, Tuple, Union
# Strings (case-insensitively) accepted as boolean true / false values.
TRUTH_TEXT = frozenset(("t", "true", "y", "yes", "on", "1"))
FALSE_TEXT = frozenset(("f", "false", "n", "no", "off", "0", ""))
# Key fragments whose values must be masked when displayed (see ``clean``).
PROTECTED_KEYS = frozenset(("secret", "password", "passwd", "pwd", "token"))
# Either a bool toggle or a mapping of extra interpolation variables.
InterpolateType = Union[bool, Dict[str, str]]
class InterpolateEnumType(Enum):
    """Interpolation Method."""

    # standard matching: variables resolved against a single flattened mapping
    STANDARD = 0
    # interpolation will look through lower levels to attempt to resolve variables.
    # This is particularly useful for templating
    DEEP = 1
    # similar to DEEP, but interpolating will not backtrack levels.
    # That is, lower levels cannot use values from higher levels.
    DEEP_NO_BACKTRACK = 2
class AttributeDict(dict):
    """Dictionary whose keys are also reachable as attributes."""

    def __getattr__(self, key: Any) -> Any:  # noqa: D105
        # Only called when normal attribute lookup fails; missing keys must
        # surface as AttributeError to conform with the attribute protocol.
        if key in self:
            return self[key]
        raise AttributeError(key)

    def __setattr__(self, key: Any, value: Any) -> None:  # noqa: D105
        self[key] = value
def as_bool(s: Any) -> bool:
    """
    Boolean value from an object.

    Return ``True`` if the case-lowered, stripped string form of *s* is a
    truthy spelling, ``False`` for a falsy one (including ``None``); bools
    pass through unchanged. Anything else raises ValueError.
    """
    if s is None:
        return False
    if isinstance(s, bool):
        return s
    text = str(s).strip().lower()
    if text in TRUTH_TEXT:
        return True
    if text in FALSE_TEXT:
        return False
    raise ValueError("Expected a valid True or False expression.")
def clean(key: str, value: Any, mask: str = "******") -> Any:
    """
    Mask a value if needed.

    The whole value is masked when the key contains a protected fragment;
    for URL values only an embedded password is masked.

    :param key: key
    :param value: value to hide
    :param mask: string to use in case value should be hidden
    :return: clear value or mask
    """
    lowered_key = key.lower()
    if any(fragment in lowered_key for fragment in PROTECTED_KEYS):
        return mask
    if isinstance(value, str) and "://" in value:
        from urllib.parse import urlparse

        url = urlparse(value)
        if url.password is None:
            return value
        masked_netloc = "{}:{}@{}".format(url.username, mask, url.hostname)
        return url._replace(netloc=masked_netloc).geturl()
    return value
def interpolate_standard(text: str, d: dict, found: Set[Tuple[str, ...]]) -> str:
    """
    Return the string interpolated as many times as needed.

    :param text: string possibly containing an interpolation pattern
    :param d: dictionary of variable values
    :param found: variable tuples already seen (used for cycle detection)
    """
    if not isinstance(text, str):
        return text
    fields = tuple(sorted(f[1] for f in string.Formatter().parse(text) if f[1] is not None))
    if not fields:
        return text
    if fields in found:
        raise ValueError("Cycle detected while interpolating keys")
    found.add(fields)
    # Resolve each referenced variable recursively before formatting.
    resolved = {name: interpolate_standard(d[name], d, found) for name in fields}
    return text.format(**resolved)
def interpolate_deep(
    attr: str,
    text: str,
    d: List[dict],
    resolved: Dict[str, str],
    levels: Dict[str, int],
    method: InterpolateEnumType,
) -> str:
    """
    Return the string interpolated as many times as needed.

    :param attr: attribute name
    :param text: string possibly containing an interpolation pattern
    :param d: list of dictionaries, ordered from highest to lowest level
    :param resolved: variables resolved so far
    :param levels: last level to read the variable from
    :param method: DEEP or DEEP_NO_BACKTRACK (controls level backtracking)
    """
    if not isinstance(text, str):
        return text
    variables = {x[1] for x in string.Formatter().parse(text) if x[1] is not None}
    if not variables:
        return text
    length = len(d)
    # Resolve only variables not already resolved by an earlier call.
    for variable in variables.difference(resolved.keys()):
        # start at 1 if this is the intended attribute
        level = levels.setdefault(variable, 1 if variable == attr else 0)
        # get the first level for which the variable is defined
        if level == length:
            raise KeyError(variable)
        for i, dict_ in enumerate(d[level:]):
            if variable in dict_:
                level = level + i
                break
        else:
            # Variable missing at every remaining level.
            raise KeyError(variable)
        levels[variable] = level + 1
        # DEEP_NO_BACKTRACK blanks out levels above the one used, so lower
        # levels cannot reach back up when resolving recursively.
        new_d = ([{}] * level) + d[level:] if method == InterpolateEnumType.DEEP_NO_BACKTRACK else d
        resolved[variable] = interpolate_deep(attr, d[level][variable], new_d, resolved, levels, method)
    return text.format(**resolved)
def flatten(d: List[dict]) -> dict:
    """
    Flatten a list of dictionaries; earlier entries take precedence.

    :param d: dictionary list
    """
    result: dict = {}
    # Apply from last to first so earlier dicts overwrite later ones.
    for layer in reversed(d):
        result.update(layer)
    return result
def interpolate_object(attr: str, obj: Any, d: List[dict], method: InterpolateEnumType) -> Any:
    """
    Return the interpolated object.

    Strings are interpolated with the chosen method; iterables are processed
    element-wise (tuples stay tuples, other iterables become lists); anything
    else is returned unchanged.

    :param attr: attribute name
    :param obj: object to interpolate
    :param d: dictionary list
    :param method: interpolation method
    """
    if isinstance(obj, str):
        if method == InterpolateEnumType.STANDARD:
            return interpolate_standard(obj, flatten(d), set())
        if method in (
            InterpolateEnumType.DEEP,
            InterpolateEnumType.DEEP_NO_BACKTRACK,
        ):
            return interpolate_deep(attr, obj, d, {}, {}, method)
        raise ValueError('Invalid interpolation method "%s"' % method)
    if hasattr(obj, "__iter__"):
        mapped = (interpolate_object(attr, x, d, method) for x in obj)
        return tuple(mapped) if isinstance(obj, tuple) else list(mapped)
    return obj
import base64
from contextlib import contextmanager
from copy import deepcopy
from sys import version_info
from typing import (
Any,
Dict,
ItemsView,
Iterator,
KeysView,
List,
Mapping,
Optional,
Tuple,
Union,
ValuesView,
cast,
)
from .helpers import (
AttributeDict,
InterpolateEnumType,
InterpolateType,
as_bool,
clean,
interpolate_object,
)
# Pre-3.8 compatibility import; presumably used further down this module
# (outside this chunk) where insertion order must be guaranteed — TODO confirm.
if version_info < (3, 8):  # pragma: no cover
    from collections import OrderedDict
class Configuration:
"""
Configuration class.
The Configuration class takes a dictionary input with keys such as
- ``a1.b1.c1``
- ``a1.b1.c2``
- ``a1.b2.c1``
- ``a1.b2.c2``
- ``a2.b1.c1``
- ``a2.b1.c2``
- ``a2.b2.c1``
- ``a2.b2.c2``
"""
    def __init__(
        self,
        config_: Mapping[str, Any],
        lowercase_keys: bool = False,
        interpolate: InterpolateType = False,
        interpolate_type: InterpolateEnumType = InterpolateEnumType.STANDARD,
    ):
        """
        Constructor.

        :param config_: a mapping of configuration values. Keys need to be strings.
        :param lowercase_keys: whether to convert every key to lower case.
        :param interpolate: either a bool toggle or a mapping of extra
            interpolation variables (``True`` means "no extra variables").
        :param interpolate_type: interpolation strategy (see InterpolateEnumType).
        """
        self._lowercase = lowercase_keys
        # ``interpolate=True`` is normalized to an empty variables mapping.
        self._interpolate = {} if interpolate is True else interpolate
        self._interpolate_type = interpolate_type
        # Keys are stored flattened: {"a": {"b": 1}} -> {"a.b": 1}.
        self._config: Dict[str, Any] = self._flatten_dict(config_)
        self._default_levels: Optional[int] = 1
    def __eq__(self, other):  # type: ignore
        """Equality operator."""
        # NOTE(review): ``other`` is wrapped in Configuration(), so comparing
        # against a non-mapping raises instead of returning False — confirm.
        return self.as_dict() == Configuration(other).as_dict()
def _filter_dict(self, d: Dict[str, Any], prefix: str) -> Dict[str, Any]:
"""
Filter a dictionary and return the items that are prefixed by :attr:`prefix`.
:param d: dictionary
:param prefix: prefix to filter on
"""
if self._lowercase:
return {
k[(len(prefix) + 1) :].lower(): v
for k, v in d.items()
for k, v in d.items()
if k.startswith(prefix + ".")
}
else:
return {k[(len(prefix) + 1) :]: v for k, v in d.items() if k.startswith(prefix + ".")}
    def _flatten_dict(self, d: Mapping[str, Any]) -> Dict[str, Any]:
        """
        Flatten one level of a dictionary.

        Nested dict/Configuration values are recursively flattened into
        dot-joined keys; scalar values are copied through unchanged.

        :param d: dict
        :return: a flattened dict
        """
        # Keys whose values need recursive flattening.
        nested = {k for k, v in d.items() if isinstance(v, (dict, Configuration))}
        if self._lowercase:
            result = {k.lower() + "." + ki: vi for k in nested for ki, vi in self._flatten_dict(d[k]).items()}
            result.update((k.lower(), v) for k, v in d.items() if not isinstance(v, (dict, Configuration)))
        else:
            result = {k + "." + ki: vi for k in nested for ki, vi in self._flatten_dict(d[k]).items()}
            result.update((k, v) for k, v in d.items() if not isinstance(v, (dict, Configuration)))
        return result
    def _get_subset(self, prefix: str) -> Union[Dict[str, Any], Any]:
        """
        Return the subset of the config dictionary whose keys start with :attr:`prefix`.

        Falls back to a level-by-level walk when no flattened key matches the
        full prefix directly. Results are deep-copied.

        :param prefix: string
        :return: dict
        """
        # Fast path: match the flattened keys against the full dotted prefix.
        d = {k[(len(prefix) + 1) :]: v for k, v in self._config.items() if k.startswith(prefix + ".")}
        if not d:
            prefixes = prefix.split(".")
            if len(prefixes) == 1:
                # Single component: the prefix may name a scalar value directly.
                return deepcopy(self._config.get(prefix, {}))
            # Walk the prefix one component at a time.
            d = self._config
            while prefixes:  # pragma: no branches
                p = prefixes[0]
                new_d = self._filter_dict(d, p)
                if new_d == {}:
                    return deepcopy(d.get(p, {}) if len(prefixes) == 1 else {})
                d = new_d
                prefixes = prefixes[1:]
        return deepcopy(d)
    def __getitem__(self, item: str) -> Union["Configuration", Any]:  # noqa: D105
        v = self._get_subset(item)
        if v == {}:
            raise KeyError(item)
        if isinstance(v, dict):
            # Dict subsets are wrapped so nested lookups keep working.
            return Configuration(v)
        elif self._interpolate is not False:
            # NOTE(review): as_dict() returns the internal dict, so this
            # update() merges the interpolation variables into self._config
            # permanently — confirm this mutation is intended.
            d = self.as_dict()
            d.update(cast(Dict[str, str], self._interpolate))
            return interpolate_object(item, v, [d], self._interpolate_type)
        else:
            return v
    def __getattr__(self, item: str) -> Any:  # noqa: D105
        # Attribute access mirrors item access; missing keys must surface as
        # AttributeError to honor the attribute protocol.
        try:
            return self[item]
        except KeyError:
            raise AttributeError(item)
    def get(self, key: str, default: Any = None) -> Union[dict, Any]:
        """
        Get the configuration values corresponding to :attr:`key`.

        Looks up the flattened (dot-joined) key directly; unlike item access
        it does not build nested Configuration objects.

        :param key: key to retrieve
        :param default: default value in case the key is missing
        :return: the value found or a default
        """
        return self.as_dict().get(key, default)
def as_dict(self) -> dict:
    """Return the representation as a dictionary.

    Note: the internal flattened dict itself is returned (not a copy), so
    mutating it mutates the configuration.
    """
    return self._config
def as_attrdict(self) -> AttributeDict:
    """Return the representation as an attribute dictionary."""
    # Top-level sections are converted recursively; leaf values stay as-is.
    return AttributeDict(
        {x: Configuration(v).as_attrdict() if isinstance(v, dict) else v for x, v in self.items(levels=1)}
    )
def get_bool(self, item: str) -> bool:
    """
    Get the item value as a bool.

    :param item: key
    :raises KeyError: if the item is missing
    """
    return as_bool(self[item])
def get_str(self, item: str, fmt: str = "{}") -> str:
    """
    Get the item value as a formatted string.

    :param item: key
    :param fmt: str.format() template applied to the value
    """
    return fmt.format(self[item])
def get_int(self, item: str) -> int:
    """
    Get the item value as an int.

    :param item: key
    :raises ValueError: if the value cannot be converted to int
    """
    return int(self[item])
def get_float(self, item: str) -> float:
    """
    Get the item value as a float.

    :param item: key
    :raises ValueError: if the value cannot be converted to float
    """
    return float(self[item])
def get_list(self, item: str) -> List[Any]:
    """
    Get the item value as a list.

    :param item: key
    """
    return list(self[item])
def get_dict(self, item: str) -> dict:
    """
    Get the item values as a dictionary.

    :param item: key
    """
    # Bypasses __getitem__ (and therefore interpolation/wrapping).
    return dict(self._get_subset(item))
def base64encode(self, item: str) -> bytes:
    """
    Get the item value as a Base64 encoded bytes instance.

    :param item: key
    """
    raw = self[item]
    if not isinstance(raw, bytes):
        raw = raw.encode()
    return base64.b64encode(raw)
def base64decode(self, item: str) -> bytes:
    """
    Get the item value as a Base64 decoded bytes instance.

    :param item: key
    """
    raw = self[item]
    if not isinstance(raw, bytes):
        raw = raw.encode()
    return base64.b64decode(raw, validate=True)
def keys(self, levels: Optional[int] = None) -> Union["Configuration", Any, KeysView[str]]:
    """Return a set-like object providing a view on the configuration keys."""
    assert levels is None or levels > 0
    levels = self._default_levels if levels is None else levels
    try:
        # A literal "keys" entry in the config shadows this method's result.
        return self["keys"]  # don't filter levels, existing attribute
    except KeyError:
        # Collapse dotted keys to the requested number of leading segments.
        # NOTE(review): despite the KeysView cast, this branch returns a list.
        return cast(
            KeysView[str],
            list({".".join(x.split(".")[:levels]) for x in set(self.as_dict().keys())}),
        )
def values(self, levels: Optional[int] = None) -> Union["Configuration", Any, ValuesView[Any]]:
    """Return a set-like object providing a view on the configuration values."""
    assert levels is None or levels > 0
    levels = self._default_levels if levels is None else levels
    try:
        # A literal "values" entry in the config shadows this method's result.
        return self["values"]
    except KeyError:
        return dict(self.items(levels=levels)).values()
def items(self, levels: Optional[int] = None) -> Union["Configuration", Any, ItemsView[str, Any]]:
    """Return a set-like object providing a view on the configuration items."""
    assert levels is None or levels > 0
    levels = self._default_levels if levels is None else levels
    try:
        # A literal "items" entry in the config shadows this method's result.
        return self["items"]
    except KeyError:
        keys = cast(KeysView[str], self.keys(levels=levels))
        return {k: self._get_subset(k) for k in keys}.items()
def __iter__(self) -> Iterator[Tuple[str, Any]]:  # noqa: D105
    # Iterates the collapsed top-level keys (dict() keeps items()'s keys).
    return iter(dict(self.items()))  # type: ignore
def __reversed__(self) -> Iterator[Tuple[str, Any]]:  # noqa: D105
    if version_info < (3, 8):
        # dicts are not reversible before Python 3.8; OrderedDict is.
        return OrderedDict(reversed(list(self.items())))  # type: ignore # pragma: no cover
    else:
        return reversed(dict(self.items()))  # type: ignore
def __len__(self) -> int:  # noqa: D105
    # Number of collapsed top-level keys.
    return len(self.keys())
def __setitem__(self, key: str, value: Any) -> None:  # noqa: D105
    # Route through update() so nested dict values get flattened consistently.
    self.update({key: value})
def __delitem__(self, prefix: str) -> None:  # noqa: D105
    """
    Filter a dictionary and delete the items that are prefixed by :attr:`prefix`.

    :param prefix: prefix to filter on to delete keys
    """
    dotted_prefix = prefix + "."
    doomed = [
        key
        for key in self._config
        if (key.lower() if self._lowercase else key) == prefix
        or (key.lower() if self._lowercase else key).startswith(dotted_prefix)
    ]
    if not doomed:
        raise KeyError("No key with prefix '%s' found." % prefix)
    for key in doomed:
        del self._config[key]
def __contains__(self, prefix: str) -> bool:  # noqa: D105
    """Report whether any configuration entry matches *prefix*."""
    try:
        self[prefix]
    except KeyError:
        return False
    return True
def clear(self) -> None:
    """Remove all items."""
    # Empties the backing flat dict in place.
    self._config.clear()
def copy(self) -> "Configuration":
    """Return shallow copy.

    NOTE(review): the new object is built from the same backing dict; whether
    the Configuration constructor copies it is not visible here — confirm
    before relying on isolation between the two objects.
    """
    return Configuration(self._config)
def pop(self, prefix: str, value: Any = None) -> Any:  # NOSONAR
    """
    Remove keys with the specified prefix and return the corresponding value.

    If the prefix is not found a KeyError is raised (only when no default was
    supplied).

    NOTE(review): ``value`` doubles as the "no default given" sentinel, so an
    explicit default of None still re-raises on a missing prefix.  A dedicated
    sentinel object would fix this, at the cost of a behavior change.
    """
    try:
        value = self[prefix]
        del self[prefix]
    except KeyError:
        if value is None:
            raise
    return value
def setdefault(self, key: str, default: Any = None) -> Any:
    """
    Insert key with a value of default if key is not in the Configuration.

    Return the value for key if key is in the Configuration, else default.
    """
    try:
        return self[key]
    except KeyError:
        self[key] = default
    # Re-read so any wrapping/interpolation in __getitem__ applies.
    return self[key]
def update(self, other: Mapping[str, Any]) -> None:
    """Update the Configuration with another Configuration object or Mapping."""
    # Flatten first so nested mappings merge as dotted keys.
    self._config.update(self._flatten_dict(other))
def reload(self) -> None:  # pragma: no cover
    """
    Reload the configuration.

    This method is not implemented for simple Configuration objects and is
    intended only to be used in subclasses.

    :raises NotImplementedError: always, on the base class
    """
    raise NotImplementedError()
@contextmanager
def dotted_iter(self) -> Iterator["Configuration"]:
    """
    Context manager for dotted iteration.

    This context manager changes all the iterator-related functions
    to include every nested (dotted) key instead of just the top level.
    """
    # Remember the previous level setting instead of assuming it was 1, so
    # nested usage (or a non-default configuration) is restored correctly.
    previous_levels = self._default_levels
    self._default_levels = None
    try:
        yield self
    finally:
        self._default_levels = previous_levels
def __repr__(self) -> str:  # noqa: D105
    # Identity-only representation; no values are shown.
    return "<Configuration: %s>" % hex(id(self))
def __str__(self) -> str:  # noqa: D105
    # Sorted view of the flat config; `clean` is defined elsewhere in this
    # module (presumably masks sensitive values — confirm).
    return str({k: clean(k, v) for k, v in sorted(self.as_dict().items())})
import re
from typing import List, Union, TYPE_CHECKING
import numpy as np
import pandas as pd
from pandas.core.dtypes.base import ExtensionDtype
from pandas.core.dtypes.cast import convert_dtypes as pandas_covert_dtypes
from pandas.core.dtypes.common import is_object_dtype, is_datetime64_any_dtype, pandas_dtype
from .._types import TimestampOrNaT
if TYPE_CHECKING:
from pandas.core.series import Series
DtypeObj = Union[np.dtype, ExtensionDtype]
def set_df_column_value(df: pd.DataFrame, loc: int, value) -> pd.DataFrame:
    """
    Set the given value in the column with position 'loc'.

    Library pandas changed `iloc` property and recommends using `isetitem`
    from 1.5.0 version.

    More details:
    https://pandas.pydata.org/docs/whatsnew/v1.5.0.html

    Link for isetitem method:
    https://pandas.pydata.org/docs/reference/api/pandas.DataFrame.isetitem.html

    Parameters
    ----------
    df : pd.DataFrame
        Pandas dataframe to convert.
    loc : int
        Index for current column, when we need update value.
    value : scalar or arraylike
        New value for column.

    Returns
    -------
    pd.DataFrame
        Modified dataframe
    """
    # Prefer the modern API when this pandas version provides it.
    isetitem = getattr(df, "isetitem", None)
    if isetitem is not None:
        isetitem(loc, value)
    else:
        df.iloc[:, loc] = value
    return df
def convert_df_columns_to_datetime(
    df: pd.DataFrame, entry: str, utc: Union[bool, None] = None, delete_tz: bool = False
) -> pd.DataFrame:
    """Converts particular dataframe columns to datetime according the pattern.

    Converts every column whose name contains *entry* (case-insensitive);
    returns the same dataframe unchanged when no column matches.

    Parameters
    ----------
    df : pd.DataFrame
        Pandas dataframe to convert.
    entry: str
        Pattern to find a column to convert.
    utc : bool, optional
        Convert to UTC if True.
    delete_tz : bool
        Convert to timezone-unaware if True.

    Returns
    -------
    pd.DataFrame
        Converted dataframe
    """
    # FIX: `utc` was annotated plain `bool` although its default is None.
    columns_indexes = [index for index, name in enumerate(df.columns.values) if entry.lower() in name.lower()]
    return convert_df_columns_to_datetime_by_idx(df, columns_indexes, utc, delete_tz)
def convert_df_columns_to_datetime_by_idx(
    df: pd.DataFrame,
    columns_indexes: List[int],
    utc: Union[bool, None] = None,
    delete_tz: bool = False,
) -> pd.DataFrame:
    """Convert dataframe columns to datetime by index.

    Parameters
    ----------
    df : pd.Dataframe
        Pandas dataframe to convert.
    columns_indexes : List[int]
        List of indexes of columns to convert.
    utc : bool, optional
        Convert to UTC if True.
    delete_tz : bool
        Convert to timezone-unaware if True.

    Returns
    -------
    pd.DataFrame
        Converted dataframe (modified in place and returned).
    """
    # FIX: `utc` annotation now allows its None default; return type added
    # for consistency with the sibling conversion helpers.
    for idx in columns_indexes:
        # Unparseable values become NaT rather than raising.
        date_value = pd.to_datetime(df.iloc[:, idx], utc=utc, errors="coerce")
        set_df_column_value(df, idx, date_value)
        if delete_tz:
            # Strip timezone info while keeping the wall-clock values.
            date_value = df.iloc[:, idx].dt.tz_localize(None)
            set_df_column_value(df, idx, date_value)
    return df
def convert_df_columns_to_datetime_re(df: pd.DataFrame, pattern: re.Pattern) -> pd.DataFrame:
    """Convert dataframe columns to datetime using regular expression pattern.

    Parameters
    ----------
    df : pd.Dataframe
        Pandas dataframe to convert.
    pattern : re.Pattern
        Compiled regular expression used to select columns by name.

    Returns
    -------
    pd.DataFrame
        Converted dataframe
    """
    # FIX: the parameter was annotated with `re.compile` (a function);
    # `re.Pattern` is the actual type of a compiled pattern.
    column_indexes = [index for index, name in enumerate(df.columns.values) if pattern.search(name)]
    return convert_df_columns_to_datetime_by_idx(df, column_indexes, utc=True, delete_tz=True)
def convert_str_to_timestamp(s: str) -> "TimestampOrNaT":
    """Parse *s* as a UTC timestamp and return it timezone-naive (NaT on failure)."""
    parsed = pd.to_datetime(s, utc=True, errors="coerce")
    return parsed.tz_localize(None)
def sort_df_by_universe(df: pd.DataFrame, universe: List[str]) -> pd.DataFrame:
    """Reorder dataframe columns to match the order of *universe*.

    Columns whose instrument is not in *universe* sort after all known ones.
    For a MultiIndex, the first level is taken as the instrument name.
    """
    length = len(universe)
    if length == 1:
        # Nothing to reorder for a single instrument.
        return df
    columns = df.columns

    def make_getidx():
        # Build a column -> position-in-universe mapper.
        get_index = universe.index
        if isinstance(columns, pd.MultiIndex):

            def geti(i):
                return i[0]

        else:

            def geti(i):
                return i

        def inner(i):
            try:
                index = get_index(geti(i))
            except ValueError:
                # Unknown column: park it after every universe entry.
                index = length
            return index

        return inner

    getidx = make_getidx()
    # [3, 0, 2, 1]
    curr_order = [getidx(col) for col in columns]
    # [0, 1, 2, 3]
    expected_order = list(range(length))
    if curr_order != expected_order:
        sorted_columns = (col for _, col in sorted(zip(curr_order, columns)))
        df = df.reindex(columns=sorted_columns)
    return df
def convert_dtypes(df: pd.DataFrame) -> pd.DataFrame:
    """
    This function is an extension to the standard pandas.DataFrame.convert_dtypes.

    Correct return dataframe if we have this columns in dataframe:
                                GOOG.O Currency
    Date
    2020-12-31 00:00:00+00:00             <NA>
    2020-12-31 00:00:00+00:00             <NA>

    Correct convert None, np.nan, pd.NA, pd.NaN to pd.NA, see official docs:
    https://pandas.pydata.org/pandas-docs/stable/user_guide/missing_data.html#missing-data-na

    Correct convert big int from Linux, Windows platform.

    Parameters
    ----------
    df: pd.DataFrame

    Returns
    -------
    pd.DataFrame
    """
    if df.empty:
        return df
    # Normalize every missing-value flavor to np.nan before dtype inference.
    df = df.fillna(np.nan)
    columns_indexes = [index for index, _ in enumerate(df.columns.values)]
    for index in columns_indexes:
        series = df.iloc[:, index]
        if is_datetime64_any_dtype(series.dtype):
            # Datetime columns are already in their final dtype.
            continue
        series = series.infer_objects()
        if is_object_dtype(series):
            # Copy so the replace() retry below cannot mutate df in place.
            series = series.copy()
        inferred_dtype = _get_inferred_dtype(series)
        if str(inferred_dtype).lower() == "object":
            # Retry inference with empty strings treated as missing values.
            new_series = series.copy()
            new_series.replace("", np.nan, inplace=True)
            inferred_dtype = _get_inferred_dtype(new_series)
            if str(inferred_dtype).lower() != "object":
                series = new_series
        result = series.astype(inferred_dtype)
        set_df_column_value(df, index, result)
    # Standardize remaining missing values to pd.NA for the nullable dtypes.
    df.fillna(pd.NA, inplace=True)
    return df
def _get_inferred_dtype(series: "Series") -> DtypeObj:
    """Infer a nullable dtype for *series*, preferring Int64 for whole floats.

    NOTE(review): relies on the private pandas helper
    ``pandas.core.dtypes.cast.convert_dtypes`` (imported as
    ``pandas_covert_dtypes``); its signature varies across pandas versions.
    """
    inferred_dtype = pandas_covert_dtypes(series.values)
    # A float series whose values are all integral (NaNs masked with a
    # placeholder first) is promoted to nullable Int64.
    if "float" in str(inferred_dtype).lower() and (series.fillna(-9999) % 1 == 0).all():
        return pandas_dtype("Int64")
    return inferred_dtype
import string
from functools import lru_cache
from typing import Set, Tuple
from jinja2 import Environment, nodes
from refinitiv.data._tools._common import cached_property
class OldStyleStringTemplate(string.Template):
    """string.Template with extended validation capabilities

    This template must be used directly or as a base for templates in refinitiv.data
    JSON configuration file.

    Default delimiter is '#'. Variable syntax: '#var' or '#{var}'.
    """

    delimiter = "#"
    braceidpattern = r"(?a:[_a-z][_.|a-z0-9]*)"

    def names(self) -> Set[str]:
        """Get names of substitution variables in pattern"""
        # findall yields (escaped, named, braced, invalid) tuples; an
        # identifier lives in the "named" or "braced" slot.
        found = set()
        for groups in self.pattern.findall(self.template):
            identifier = groups[1] or groups[2]
            if identifier:
                found.add(identifier)
        return found
class StringTemplate:
    """Jinja-based string template"""

    def __init__(self, source):
        # Custom delimiters: variables look like "#{name}" (to match the
        # old-style template syntax) and blocks use "{{ ... }}".
        self.env = Environment(
            block_start_string="{{",
            block_end_string="}}",
            variable_start_string="#{",
            variable_end_string="}",
        )
        # "in" filter renders an iterable as "'a', 'b', 'c'".
        self.env.filters["in"] = lambda inp: ", ".join(map(repr, inp))
        self.source = source

    @property
    def template(self):
        # Raw template text.
        return self.source

    @staticmethod
    @lru_cache(128)
    def get_vars_from_node(node: nodes.Node) -> Set[str]:
        """Collect dotted variable names (e.g. "a.b.c") from a parsed jinja AST."""
        vars_set = set()
        # Breadth-first walk; each queue entry is (node, *attribute_chain),
        # where the chain accumulates Getattr attributes right-to-left.
        iter_list = [(node,)]
        while iter_list:
            _node, *_attrs = iter_list.pop(0)
            if isinstance(_node, nodes.Getattr):
                _attrs = (_node.attr, *_attrs)
            elif isinstance(_node, nodes.Name):
                vars_set.add(".".join(map(str, (_node.name, *_attrs))))
            iter_list.extend((i, *_attrs) for i in _node.iter_child_nodes())
        return vars_set

    def placeholders(self):
        """Return the set of variable names referenced by the template."""
        return self.get_vars_from_node(self._parsed)

    @cached_property
    def _parsed(self):
        # Parsing raises a jinja2 error on bad syntax; cached after first use.
        return self.env.parse(self.source)

    def validate(self):
        """Parse the template, raising a jinja2 error if it is malformed."""
        self._parsed

    def substitute(self, **kwargs):
        """Render the template with the given variables."""
        return self.env.from_string(self.source).render(**kwargs)
class InvalidPlaceholderError(ValueError):
    """Exception to display syntax errors in string templates"""

    def __init__(
        self,
        index: int,
        template_text: str,
        limit: int = 80,
        padding: int = 0,
        prefix: str = "",
    ):
        """
        Parameters
        ----------
        index : int
            index of wrong placeholder start, get it from regex match
        template_text : str
            original text template
        limit : int
            maximum length of error message
        padding : int
            padding around place where invalid placeholder starts in error message text
        prefix : str
            prefix before error message, for additional information
        """
        self.limit = limit
        self.padding = padding
        self.index = index
        self.template_text = template_text
        self.prefix = prefix

    def __str__(self):
        # Locate the offending position, shorten the line to fit `limit`,
        # and draw a caret under the placeholder start.
        line_index, col_index = index_to_line_and_col(self.index, self.template_text)
        target_line = self.template_text.splitlines()[line_index]
        target_line, shift = shorten_string_to_include_position(target_line, self.limit, col_index, self.padding)
        return "\n".join(
            [
                f"{self.prefix}Invalid placeholder in the template string: "
                f"line {line_index + 1}, col {col_index + 1}:",
                target_line,
                "-" * (col_index - shift) + "^",
            ]
        )
def index_to_line_and_col(index: int, target_string: str) -> Tuple[int, int]:
    """Convert position index in multiline string to line and column

    Parameters
    ----------
    index : int
        index of symbol in target string
    target_string : str
        target string

    Returns
    -------
    Tuple of line index and column index of given symbol, starting from zero
    """
    # Splitting on "\n" keeps a trailing empty element when the prefix ends
    # with a newline, so an index pointing at the first character of a line
    # maps to (line, 0).  The previous splitlines()-based version dropped
    # that trailing segment and reported the end of the preceding line.
    prefix_lines = target_string[:index].split("\n")
    return len(prefix_lines) - 1, len(prefix_lines[-1])
def shorten_string_to_include_position(line: str, limit: int, pos: int, padding: int = 0) -> Tuple[str, int]:
    """Shorten string to given limit to include given position

    Can be used when we need to display position in a string when screen width is
    limited.

    Parameters
    ----------
    line : str
        target string
    limit : int
        maximum length of resulting string
    pos : int
        position in string that must be included in shortened string, starting from 0
    padding
        number of symbols left and right of pos that also must be included

    Returns
    -------
    Tuple of shortened string and number of symbols removed from the start
    """
    right_edge = pos + padding
    if padding >= len(line):
        raise ValueError("padding must be less than the length of line")
    if len(line) <= limit:
        # Already fits on screen.
        return line, 0
    if right_edge < limit:
        # The interesting region fits inside a prefix of the line.
        return line[:limit], 0
    # Slide the window right just far enough to include right_edge.
    removed = max(right_edge - limit + 1, 0)
    return line[removed : right_edge + 1], removed
import inspect
import re
from typing import Tuple
from urllib.parse import ParseResult, urlparse, ParseResultBytes
# Insert "_" between a character and an Upper+lower word (e.g. "PResponse").
pattern_1 = re.compile("(.)([A-Z][a-z]+)")
# Insert "_" between a lowercase/digit and an uppercase letter.
pattern_2 = re.compile("([a-z0-9])([A-Z])")


def camel_to_snake(s):
    """Convert a camelCase/PascalCase identifier to snake_case."""
    if not s:
        return s
    if "_" in s:
        # Already partially snake-cased: convert each underscore part.
        return "_".join(camel_to_snake(part) for part in s.split("_"))
    spaced = pattern_2.sub(r"\1_\2", pattern_1.sub(r"\1_\2", s))
    return spaced.lower()
def parse_url(url: str) -> ParseResult:
    """Parse *url* into a ParseResult, shimming old urlparse scheme handling.

    On Python versions before 3.11.1 this re-implements the scheme
    normalization performed by later releases: a leading "<scheme>:" prefix
    found in the path is split out and lower-cased, and a candidate scheme
    that does not start with an ASCII letter is folded back into the path.
    """
    import sys

    py_ver = sys.version_info
    # FIX: the previous guard `minor <= 11 or (minor == 11 and micro < 1)`
    # collapsed to `minor <= 11`, so the shim also ran on 3.11.1+ where
    # urlparse already behaves correctly.  Intended bound: < 3.11.1.
    if py_ver.major == 3 and (py_ver.minor < 11 or (py_ver.minor == 11 and py_ver.micro < 1)):
        result_urlparse = urlparse(url)
        if isinstance(result_urlparse, ParseResultBytes):
            return result_urlparse
        scheme = result_urlparse.scheme
        netloc = result_urlparse.netloc
        path = result_urlparse.path
        query = result_urlparse.query
        fragment = result_urlparse.fragment
        i = path.find(":")
        if not scheme and path and i > 0:
            # No scheme detected but the path starts with "<token>:".
            scheme, path = path[:i].lower(), path[i + 1 :]
        if scheme and (not scheme[0].isascii() or not scheme[0].isalpha()):
            # A scheme must start with an ASCII letter; otherwise it is path.
            path = f"{scheme}:{path}" if path else scheme
            scheme = ""
        result = ParseResult(
            scheme=scheme,
            netloc=netloc,
            path=path,
            params=result_urlparse.params,
            query=query,
            fragment=fragment,
        )
    else:
        result = urlparse(url)
    return result
def validate_endpoint_request_url_parameters(url, path_parameters):
    """Raise ValueError when the URL is empty or a required path parameter is absent."""
    if url == "":
        raise ValueError("Requested URL is missing, please provide valid URL")
    universe_missing = url.endswith("{universe}") and not path_parameters
    if universe_missing:
        raise ValueError("Path parameter 'universe' is missing, please provide path parameter")
def inspect_parameters_without_self(class_: object):
    """Return the names of a class's __init__ parameters, excluding `self`."""
    signature = inspect.signature(class_.__init__)
    filtered = {name: p for name, p in signature.parameters.items() if name != "self"}
    return filtered.keys()
def version_to_tuple(version: str) -> Tuple[int, ...]:
    """Parse a dotted version string such as "1.2.3" into a tuple of ints."""
    return tuple(int(part) for part in version.split("."))
from typing import Optional, Union, TYPE_CHECKING
from ._data_provider import fx_swp_to_swp_data_provider as data_provider
from .._core.session import get_default
from .._tools import Copier
from ..delivery._data._data_provider_layer import _check_response
if TYPE_CHECKING:
from ._fx_spot_quote import FxSpotQuote
from ._fx_swap_points import FxSwapPoints
from .._types import OptDateTime, OptStrStrs
from ..delivery._data._response import Response
def fx_swp_to_swp(
    fx_cross_code: str,
    *,
    market_data_date_time: "OptDateTime" = None,
    tenors: "OptStrStrs" = None,
    fields: "OptStrStrs" = None,
    spot_ccy_1: Optional[Union[dict, "FxSpotQuote"]] = None,
    spot_ccy_2: Optional[Union[dict, "FxSpotQuote"]] = None,
    swap_points_ccy_1: Optional[Union[dict, "FxSwapPoints"]] = None,
    swap_points_ccy_2: Optional[Union[dict, "FxSwapPoints"]] = None,
) -> "Response":
    """
    Computes the cross currency curve using the swap points and
    spot rates of each of the currencies in the FX Cross pair against the pivot currency

    Parameters
    ----------
    fx_cross_code : str
        The currency pair of FX Cross, expressed in ISO 4217 alphabetical format (e.g., 'EURCHF').
        The user can specify a pivot currency with 3-currency ISO code in the FX Cross (e.g., 'GBPEURCHF'),
        where the second currency is the pivot currency. By default, the pivot currency is 'USD'
    market_data_date_time : OptDateTime, optional
        The date at which the market data is retrieved. The value is expressed
        in ISO 8601 format YYYY-MM-DD (e.g. '2021-01-01').
    tenors : str, list of str, optional
        An array of requested tenors, or/and end dates. the value can be expressed as
        the code indicating the time period (e.g., '1m', '6m', '4y'), or in iso 8601
        format 'yyy-mm-dd' (e.g., '2021-01-01')
    fields : str, list of str, optional
        An array of the requested fields
    spot_ccy_1 : dict or FxSpotQuote, optional
    spot_ccy_2 : dict or FxSpotQuote, optional
    swap_points_ccy_1 : dict or FxSwapPoints, optional
    swap_points_ccy_2 : dict or FxSwapPoints, optional

    Returns
    -------
    Response

    Examples
    -------
    >>> import refinitiv.data as rd
    >>> response = rd.qpl.fx_swp_to_swp(
    ...     fx_cross_code="EURUSD",
    ...     market_data_date_time="2022-09-22",
    ...     spot_ccy_1=rd.qpl.FxSpotQuote(source="D3"),
    ...     spot_ccy_2=rd.qpl.FxSpotQuote(bid=1, ask=2),
    ...     swap_points_ccy_1=rd.qpl.FxSwapPoints(
    ...         additional_tenor_types=[rd.qpl.TenorTypes.LONG],
    ...         source="ICAP",
    ...     ),
    ...     swap_points_ccy_2=rd.qpl.FxSwapPoints(
    ...         additional_tenor_types=[rd.qpl.TenorTypes.LONG, rd.qpl.TenorTypes.ODD],
    ...         source="D3",
    ...         overrides=[
    ...             rd.qpl.TenorBidAsk(tenor="1M", bid=50, ask=60),
    ...             rd.qpl.TenorBidAsk(tenor="2M", bid=90),
    ...         ],
    ...     ),
    ... )
    >>> response.data.df
    >>> response.data.raw
    """
    # Resolve the endpoint URL from the default session's configuration.
    session = get_default()
    url = session.config.get("apis.data.qpl-functions.endpoints.fx_swp_to_swp")
    # Copier.get_list presumably normalizes str-or-list input into a fresh
    # list (confirm) — only applied when a value was provided.
    tenors = tenors and Copier.get_list(tenors)
    fields = fields and Copier.get_list(fields)
    response = data_provider.get_data(
        session,
        url,
        fx_cross_code=fx_cross_code,
        market_data_date_time=market_data_date_time,
        tenors=tenors,
        fields=fields,
        spot_ccy_1=spot_ccy_1,
        spot_ccy_2=spot_ccy_2,
        swap_points_ccy_1=swap_points_ccy_1,
        swap_points_ccy_2=swap_points_ccy_2,
    )
    # Raises if the service reported an error payload (per session config).
    _check_response(response, session.config)
    return response
import time
from queue import Empty, Queue
from threading import Thread, Lock
from typing import List, Set, Optional, Type, Tuple
from ._abstract_logger import AbstractUsageLogger
from ._utils import LogRecord, RecordData, LoggerConfig
from ._filter_types import FilterType
from .._configure import get_config
class UsageLoggerProxy(Thread):
    """
    This class is a wrapper around a threading.Thread that runs a
    logging thread. It is used to log usage data from the RD Library.

    Parameters
    ----------
    batch_size : int
        The maximum number of records to send to the loggers at once.
    flush_timeout : int
        The maximum time to wait before flushing the batch buffer.
    logging_enabled : bool
        Whether to enable logging.

    Examples
    --------
    >>> from refinitiv.data.usage_collection import get_usage_logger
    >>> class UserLogger(AbstractUsageLogger):
    >>>     def __init__(self, *args, **kwargs):
    >>>         ...
    >>>     def log(self, records: List[LogRecord]) -> None:
    >>>         ...
    >>>     def close(self) -> None:
    >>>         ...
    >>>
    >>> logger = get_usage_logger()
    >>> logger.add_logger(UserLogger)
    >>> logger.start()
    >>> ...
    >>> logger.join()
    """

    def __init__(
        self,
        batch_size: int = 100,
        flush_timeout: int = 10,
        logging_enabled: bool = True,
    ) -> None:
        super().__init__(name="UsageLoggerThread", daemon=True)
        # Producer/consumer queue between callers and this logging thread.
        self._queue: Queue = Queue()
        # Pending logger configurations; drained by _update_loggers on the
        # logging thread.  Guarded by self._lock.
        self._loggers: List[LoggerConfig] = []
        self._lock = Lock()
        # Instantiated loggers, each paired with its filter set.
        self._logger_instances: List[Tuple[AbstractUsageLogger, Set[FilterType]]] = []
        self._batch_buffer: List[LogRecord] = []
        self._batch_size: int = batch_size
        self._flush_timeout: int = flush_timeout
        self._logging_enabled = logging_enabled
        # Monotonic timestamp (seconds) of the last flush.
        self._last_flush: float = 0

    @property
    def logging_enabled(self):
        return self._logging_enabled

    @property
    def queue(self):
        return self._queue

    @staticmethod
    def _filter_batch(batch: List[LogRecord], logger_filter: Optional[Set[FilterType]]) -> List[LogRecord]:
        """
        Filter the batch by the given filter.

        An empty/None filter passes every record; otherwise a record passes
        when its filter set intersects the logger's filter set.

        Parameters
        ----------
        batch : List[LogRecord]
        logger_filter : Optional[Set[FilterType]]

        Returns
        -------
        """
        return [record for record in batch if not logger_filter or (logger_filter & record.filter)]

    def flush(self) -> None:
        """
        Flush the batch buffer to the loggers.

        Returns
        -------
        """
        if self._batch_buffer:
            # For each registered logger send the whole buffer
            for _logger, _filter in self._logger_instances:
                _logger.log(self._filter_batch(self._batch_buffer, _filter))
            self._batch_buffer.clear()
        # Reset the timer even when the buffer was empty.
        self._last_flush = time.monotonic()

    def _update_loggers(self) -> None:
        """
        Update the list of logger instances.

        Instantiates every pending LoggerConfig (on the logging thread) and
        clears the pending list.

        Returns
        -------
        """
        with self._lock:
            for logger_config in self._loggers:
                self._logger_instances.append(
                    (
                        logger_config.logger_type(*logger_config.args, **logger_config.kwargs),
                        logger_config.filters,
                    )
                )
            self._loggers = []

    def run(self) -> None:
        # Main consumer loop; a None record is the shutdown sentinel.
        self._last_flush = time.monotonic()
        while True:
            if len(self._loggers) > 0:
                self._update_loggers()
            try:
                # Wait at most until the next flush deadline (min 0.1s).
                record = self._queue.get(timeout=max(0.1, self._flush_timeout - (time.monotonic() - self._last_flush)))
            except Empty:
                # Timed out with nothing new: flush whatever accumulated.
                self.flush()
                continue
            if record is None:
                break
            self._batch_buffer.append(record)
            if len(self._batch_buffer) >= self._batch_size:
                self.flush()
        # Final drain and logger teardown on shutdown.
        self.flush()
        for logger, _ in self._logger_instances:
            logger.close()

    def start(self) -> None:
        # No-op when usage logging is disabled via config.
        if self._logging_enabled:
            super().start()

    def add_logger(
        self,
        logger: Type[AbstractUsageLogger],
        _filter: Set[FilterType],
        *args,
        **kwargs,
    ) -> None:
        """
        Add a logger to the logging thread.

        The logger class is instantiated lazily on the logging thread with
        *args/**kwargs.
        """
        if self._logging_enabled:
            if not issubclass(logger, AbstractUsageLogger):
                raise ValueError("Logger must be a subclass of UsageLogger")
            with self._lock:
                self._loggers.append(LoggerConfig(logger, args, kwargs, _filter))
        else:
            raise RuntimeError("Tried to add a logger to a disabled logger thread. Check session config")

    def log(self, record: LogRecord) -> None:
        # Enqueue one record; dropped silently when logging is disabled.
        if self._logging_enabled:
            self.queue.put(record)

    def log_func(
        self,
        name: str,
        func_path: str,
        args: tuple = None,
        kwargs: dict = None,
        result: object = None,
        desc: Set[FilterType] = None,
    ) -> None:
        """Convenience wrapper: build a LogRecord for a function call and enqueue it."""
        if self._logging_enabled:
            if args is None:
                args = ()
            if kwargs is None:
                kwargs = {}
            if desc is None:
                desc = set()
            self.log(
                LogRecord(name, func_path, RecordData(args, kwargs, result), desc),
            )

    def join(self, timeout: Optional[float] = None) -> None:
        """
        Stop the logger thread.

        Enqueues the None sentinel, waits for the thread to finish, and
        permanently disables further logging on this proxy.

        Parameters
        ----------
        timeout : Optional[float]

        Returns
        -------
        """
        if self._logging_enabled:
            self.queue.put(None)
            try:
                super().join(timeout)
            except RuntimeError:
                # Thread was never started (e.g. logging disabled later).
                pass
            self._logging_enabled = False
# Module-level singleton; created lazily by get_usage_logger().
usage_logger: Optional[UsageLoggerProxy] = None


def get_usage_logger() -> UsageLoggerProxy:
    """Return the process-wide UsageLoggerProxy, creating and starting it on first use.

    NOTE(review): creation is not guarded by a lock, so two threads calling
    this concurrently could each create a proxy — confirm callers initialize
    from a single thread.
    """
    global usage_logger
    if usage_logger is None:
        logging_enabled = get_config().get_bool("usage_logger.enabled")
        usage_logger = UsageLoggerProxy(logging_enabled=logging_enabled)
        usage_logger.start()
    return usage_logger
from . import _json_requests as json_requests
from ._tools import (
is_string_type,
check_for_string_or_list_of_strings,
check_for_string,
check_for_int,
get_json_value,
tz_replacer,
get_default_session,
)
from .._tools._datetime import to_datetime, get_date_from_today
import pandas as pd
import numpy
from ..errors import RDError
# UDF backend endpoint name used for time-series requests.
TimeSeries_UDF_endpoint = "TimeSeries"
# Accepted values for the optional `calendar` request parameter.
Calendar_Values = ["native", "tradingdays", "calendardays"]
# Accepted values for the optional `corax` (corporate actions) parameter.
Corax_Values = ["adjusted", "unadjusted"]
def get_timeseries(
    rics,
    fields=None,
    start_date=None,
    end_date=None,
    interval=None,
    count=None,
    calendar=None,
    corax=None,
    normalize=False,
    raw_output=False,
    debug=False,
):
    """
    Returns historical data on one or several RICs

    Parameters
    ----------
    rics: string or list of strings
        Single RIC or List of RICs to retrieve historical data for
    start_date: string or datetime.datetime or datetime.timedelta
        Starting date and time of the historical range.
        string format is: '%Y-%m-%dT%H:%M:%S'. e.g. '2016-01-20T15:04:05'.
        datetime.timedelta is negative number of day relative to datetime.now().
        Default: datetime.now() + timedelta(-100)
        You can use the helper function get_date_from_today, please see the usage in the examples section
    end_date: string or datetime.datetime or datetime.timedelta
        End date and time of the historical range.
        string format could be:
            '%Y-%m-%d' (e.g. '2017-01-20')
            '%Y-%m-%dT%H:%M:%S' (e.g. '2017-01-20T15:04:05')
        datetime.timedelta is negative number of day relative to datetime.now().
        Default: datetime.now()
        You can use the helper function get_date_from_today, please see the usage in the examples section
    interval: string
        Data interval.
        Possible values: 'tick', 'minute', 'hour', 'daily', 'weekly', 'monthly', 'quarterly', 'yearly' (Default 'daily')
        Default: 'daily'
    fields: string or list of strings
        Use this parameter to filter the returned fields set.
        Available fields: 'TIMESTAMP', 'VALUE', 'VOLUME', 'HIGH', 'LOW', 'OPEN', 'CLOSE', 'COUNT'
        By default all fields are returned.
    count: int, optional
        Max number of data points retrieved.
    calendar: string, optional
        Possible values: 'native', 'tradingdays', 'calendardays'.
    corax: string, optional
        Possible values: 'adjusted', 'unadjusted'
    normalize: boolean, optional
        If set to True, the function will return a normalized data frame with the following columns 'Date','Security','Field'.
        If the value of this parameter is False the returned data frame shape will depend on the number of rics and the number of fields in the response
        There are three different shapes:
            - One ric and many fields
            - Many rics and one field
            - Many rics and many fields
        Default: False
        Remark: This parameter has a less precedence than the parameter rawOutput i.e. if rawOutput is set to True, the returned data will be the raw data and this parameter will be ignored
    raw_output: boolean, optional
        Set this parameter to True to get the data in json format
        if set to False, the function will return a data frame which shape is defined by the parameter normalize
        Default: False
    debug: boolean, optional
        When set to True, the json request and response are printed.
        Default: False

    Raises
    ------
    Exception
        If request fails or if server returns an error.
    ValueError
        If a parameter type or value is wrong

    Examples
    --------
    >>> import refinitiv.data.eikon as ek
    >>> ek.set_app_key('set your app key here')
    >>> req = ek.get_timeseries(["MSFT.O"], start_date = "2017-02-01T15:04:05",
    >>>                         end_date = "2017-02-05T15:04:05", interval="tick")
    >>> req = ek.get_timeseries(["MSFT.O"], start_date = "2017-03-01",
    >>>                         end_date = "2017-03-10", interval="daily")
    >>> req = ek.get_timeseries(["MSFT.O"], start_date = get_date_from_today(150),
    >>>                         end_date = get_date_from_today(100), interval="daily")
    """
    logger = get_default_session().logger()
    # set the ric(s) in the payload
    check_for_string_or_list_of_strings(rics, "rics")
    if is_string_type(rics):
        rics = [rics.strip()]
    if type(rics) == list:
        # Upper-case only fully-lowercase RICs; mixed-case RICs are kept.
        rics = [ric.upper() if ric.islower() else ric for ric in rics]
    # set the field(s) in the payload
    if fields is None or fields == "*":
        fields = ["*"]
    else:
        check_for_string_or_list_of_strings(fields, "fields")
        if is_string_type(fields):
            # A space-separated string is split into individual field names.
            fields = fields.strip().upper().split()
        else:
            fields = [x.upper() for x in fields]
        if "*" in fields:
            fields = ["*"]
        elif "TIMESTAMP" not in fields:
            # TIMESTAMP is always required — it becomes the frame index.
            fields.append("TIMESTAMP")
    if interval is None:
        interval = "daily"
    # check the interval in the payload
    check_for_string(interval, "interval")
    if start_date is None:
        start_date = get_date_from_today(100)
    if end_date is None:
        end_date = get_date_from_today(0)
    start_date = to_datetime(start_date).isoformat()
    end_date = to_datetime(end_date).isoformat()
    # ISO-8601 strings compare chronologically via string comparison.
    if start_date > end_date:
        error_msg = f"end date ({end_date}) should be after than start date ({start_date})"
        logger.error(error_msg)
        raise ValueError(error_msg)
    payload = {
        "rics": rics,
        "fields": fields,
        "interval": interval,
        "startdate": start_date,
        "enddate": end_date,
    }
    # Add optional parameters
    # set the count in the payload
    if count is not None:
        check_for_int(count, "count")
        payload.update({"count": count})
    # set the calendar in the payload
    if calendar is not None:
        if is_string_type(calendar):
            payload.update({"calendar": calendar})
        else:
            error_msg = "calendar must has string type"
            logger.error(error_msg)
            raise ValueError(error_msg)
    # set the corax in the payload
    if corax is not None:
        if is_string_type(corax):
            payload.update({"corax": corax})
        else:
            error_msg = "corax must be a string"
            logger.error(error_msg)
            raise ValueError(error_msg)
    response = json_requests.send_json_request(TimeSeries_UDF_endpoint, payload, debug=debug)
    ts_result = response.json()
    # Catch all errors to raise a warning
    ts_timeserie_data = ts_result["timeseriesData"]
    ts_status_errors = [ts_data for ts_data in ts_timeserie_data if get_json_value(ts_data, "statusCode") == "Error"]
    ts_error_messages = ""
    for ts_status in ts_status_errors:
        # Rewrite "Description ..." into "<RIC> ..." for readability.
        ts_error_message = get_json_value(ts_status, "errorMessage")
        ts_error_message = ts_error_message[ts_error_message.find("Description") :]
        ts_instrument = get_json_value(ts_status, "ric")
        ts_error_message = ts_error_message.replace("Description", ts_instrument)
        ts_error_messages += ts_error_message
        ts_error_messages += " | "
        warning_message = "Error with {}".format(ts_error_message)
        logger.warning(warning_message)
    # if all timeseries are in error, then raise ElektronError with all error messages
    # NOTE(review): an empty timeseriesData list also satisfies this equality
    # (0 == 0) and raises with an empty message — confirm that is intended.
    if len(ts_status_errors) == len(ts_timeserie_data):
        logger.error(ts_error_messages)
        raise RDError("Error", message=ts_error_messages)
    if raw_output:
        return ts_result
    data_frame = None
    if normalize:
        data_frame = NormalizedDataFrame_Formatter(ts_result).get_data_frame()
    else:
        data_frame = NiceDataFrame_Formatter(ts_result).get_data_frame()
    if len(data_frame) > 0:
        data_frame = data_frame.fillna(numpy.nan)
    return data_frame
class NormalizedDataFrame_Formatter:
    """Format a UDF time-series payload into a 'long' (normalized) DataFrame.

    Every observation becomes one output row with the columns
    Date / Security / Field / Value.
    """

    def __init__(self, json_data):
        # json_data: decoded UDF TimeSeries response; must contain a
        # "timeseriesData" list (see get_data_frame below).
        self.json_data = json_data

    def get_data_frame(self):
        """Build the normalized DataFrame.

        Series whose ``statusCode`` is ``"Error"`` are skipped entirely.
        """
        timeseriesList = self.json_data["timeseriesData"]
        data_frames = []
        for timeseries in timeseriesList:
            ric = timeseries["ric"]
            error_code = timeseries["statusCode"]
            if error_code.lower() == "error":
                continue
            fields = [f["name"] for f in timeseries["fields"]]
            timestamp_index = fields.index("TIMESTAMP")
            fields.pop(timestamp_index)  # remove timestamp from fields (timestamp is used as index for dataframe)
            datapoints = numpy.array(timeseries["dataPoints"])
            if len(datapoints):
                timestamps = [tz_replacer(value) for value in datapoints[:, timestamp_index]]
                timestamps = numpy.array(timestamps, dtype="datetime64")  # index for dataframe
                # remove timestamp column from numpy array
                datapoints = numpy.delete(datapoints, numpy.s_[timestamp_index], 1)
                fields_count = len(fields)
                column_size = len(datapoints)
                # One output row per (timestamp, field) pair: repeat the ric and
                # tile the field names so they line up with the row-major
                # flattening of the value matrix below.
                symbol_column = numpy.array([ric] * fields_count * column_size)
                fields_column = numpy.array(fields * column_size)
                values_column = numpy.concatenate(datapoints, axis=0)
                # assumes every non-timestamp cell is numeric — TODO confirm
                values_column = values_column.astype("float")
                timestamp_column = [[timestamps[i]] * fields_count for i in range(timestamps.size)]
                timestamp_column = numpy.concatenate(timestamp_column, axis=0)
                df = pd.DataFrame(
                    dict(
                        Date=timestamp_column,
                        Security=symbol_column,
                        Field=fields_column,
                        Value=values_column,
                    )
                )
                if not df.empty:
                    df = df.convert_dtypes()
                data_frames.append(df)
            else:
                data_frames.append(pd.DataFrame([], columns=fields))
        # NOTE(review): pd.concat raises ValueError when data_frames is empty
        # (i.e. every series errored or the input list was empty) — confirm
        # this is the intended behaviour.
        return pd.concat(data_frames)
class NiceDataFrame_Formatter:
    """Format a UDF time-series payload into a 'wide' DataFrame.

    The output shape depends on how many instruments (RICs) and fields the
    payload contains:

    - 1 RIC, N fields  -> columns are the fields, axis named after the RIC
    - N RICs, 1 field  -> columns are the RICs, axis named after the field
    - N RICs, N fields -> MultiIndex columns (Security, Field)

    Bug fix: the final ``return`` line of ``_get_frame_N_rics_N_fields`` had
    extraction garbage (a fused metadata table row) appended, which made the
    module syntactically invalid; it is restored to a plain return.
    """

    def __init__(self, json_data):
        # json_data: decoded UDF TimeSeries response with a "timeseriesData" list.
        self.json_data = json_data

    def get_data_frame(self):
        """Dispatch to the proper layout based on RIC/field counts.

        Returns the (possibly empty) list of per-RIC frames when there is
        nothing to merge.
        """
        data_frames, rics, fields = self._get_frame_list()
        rics_count = len(rics)
        fields_count = len(fields)
        if rics_count == 0 or fields_count == 0:
            return data_frames
        if rics_count == 1:
            return self._get_frame_1_ric_N_fields(data_frames, rics[0])
        if rics_count > 1 and fields_count == 1:
            return self._get_frame_N_rics_1_field(data_frames, rics, fields[0])
        return self._get_frame_N_rics_N_fields(data_frames, rics, fields)

    def _get_frame_list(self):
        """Build one DataFrame per non-errored series.

        Returns (data_frames, rics, fields) where ``fields`` is taken from the
        last processed series (all series are expected to share field names —
        TODO confirm).
        """
        timeseriesList = self.json_data["timeseriesData"]
        data_frames = []
        unique_fields = []
        rics = []
        for timeseries in timeseriesList:
            ric = timeseries["ric"]
            error_code = timeseries["statusCode"]
            if error_code.lower() == "error":
                continue
            rics.append(ric)
            fields = [f["name"] for f in timeseries["fields"]]
            timestamp_index = fields.index("TIMESTAMP")
            fields.pop(timestamp_index)  # timestamp becomes the index, not a column
            unique_fields = fields
            datapoints = numpy.array(timeseries["dataPoints"])
            if len(datapoints):
                timestamps = numpy.array(
                    [tz_replacer(value) for value in datapoints[:, timestamp_index]],
                    dtype="datetime64",
                )  # index for dataframe
                datapoints = numpy.delete(
                    datapoints, numpy.s_[timestamp_index], 1
                )  # remove timestamp column from numpy array
                df = pd.DataFrame(datapoints, columns=fields, index=timestamps, dtype="float")
                if not df.empty:
                    df = df.convert_dtypes()
            else:
                df = pd.DataFrame([], columns=fields)
            if not df.empty:
                df = df.convert_dtypes()
            df.index.name = "Date"
            data_frames.append(df)
        return data_frames, list(rics), list(unique_fields)

    def _get_frame_1_ric_N_fields(self, data_frames, ricName):
        """Concatenate columns for a single instrument; column axis named by RIC."""
        data_frame = pd.concat(data_frames, axis=1)
        if not data_frame.empty:
            data_frame = data_frame.convert_dtypes()
        data_frame.axes[1].name = ricName
        return data_frame

    def _get_frame_N_rics_1_field(self, data_frames, rics, fieldName):
        """One column per RIC (each per-RIC frame holds the single field)."""
        ric_index = 0
        for df in data_frames:
            ric_name = rics[ric_index]
            df.rename(columns={fieldName: ric_name}, inplace=True)
            ric_index += 1
        data_frame = pd.concat(data_frames, axis=1)
        if not data_frame.empty:
            data_frame = data_frame.convert_dtypes()
        data_frame.axes[1].name = fieldName
        return data_frame

    def _get_frame_N_rics_N_fields(self, data_frames, rics, fields):
        """MultiIndex (Security, Field) columns, one group per RIC."""
        ric_index = 0
        for df in data_frames:
            ric_name = rics[ric_index]
            columns = [(ric_name, f) for f in df.columns]
            df.columns = pd.MultiIndex.from_tuples(columns)
            ric_index += 1
        data_frame = pd.concat(data_frames, axis=1)
        data_frame.axes[1].names = ["Security", "Field"]
        if data_frame.empty:
            return data_frame
        else:
            return data_frame.convert_dtypes()
import json
import typing
import numpy
import pandas as pd
from .._core.session._default_session_manager import _eikon_default_session_manager
from .._core.session.tools import is_closed
if typing.TYPE_CHECKING:
from .._core.session._session import Session
def is_string_type(value):
    """Return True when *value* is a string.

    The previous implementation was a Python 2 compatibility shim that tried
    ``basestring`` and fell back on NameError; on Python 3 the fallback was
    always taken, so the check reduces to a plain ``isinstance``.
    """
    return isinstance(value, str)
def get_json_value(json_data, name):
    """Return ``json_data[name]`` when the key exists, otherwise ``None``."""
    return json_data[name] if name in json_data else None
def is_list_of_string(values):
    """Return True when every element of *values* is a string (True for empty)."""
    for value in values:
        if not is_string_type(value):
            return False
    return True
def check_for_string(parameter, name):
    """Raise ValueError unless *parameter* is a string; *name* is used in the message."""
    if is_string_type(parameter):
        return
    raise ValueError("The parameter {} should be a string, found {}".format(name, str(parameter)))
def check_for_string_or_list_of_strings(parameter, name):
    """Validate that *parameter* is a non-empty string or a list of strings.

    Raises
    ------
    ValueError
        If *parameter* is neither a truthy string nor a list, or if it is a
        list containing non-string items. An empty list is accepted (as in the
        original implementation).

    Notes
    -----
    Uses ``isinstance`` instead of the previous ``type(...) != list``
    comparison, so list subclasses are now accepted as lists (backward
    compatible generalization).
    """
    if isinstance(parameter, list):
        if not all(isinstance(item, str) for item in parameter):
            raise ValueError(
                "All items in the parameter {} should be of data type string, found {}".format(
                    name, [type(v) for v in parameter]
                )
            )
    elif not parameter or not isinstance(parameter, str):
        raise ValueError(
            "The parameter {} should be a string or a list of string, found {}".format(name, type(parameter))
        )
def check_for_int(parameter, name):
    """Raise ValueError unless *parameter* is exactly an ``int``.

    The exact-type test (``type(...) is int``) is deliberate: it rejects
    ``bool`` even though ``bool`` subclasses ``int``.
    """
    if type(parameter) is int:
        return
    raise ValueError(
        "The parameter {} should be an int, found {} type value ({})".format(name, type(parameter), str(parameter))
    )
def build_list_with_params(values, name):
    """Normalize *values* into (value, params) pairs.

    - string: split on whitespace, each token paired with ``None``
    - list: strings become ``(value, None)``, two-item sequences become
      ``(value[0], value[1])``
    - anything else: treated as a single two-item sequence

    Raises ValueError (using *name* in the message) when the shape is wrong.
    """
    if values is None:
        raise ValueError(name + " is None, it must be a string or a list of strings")
    if is_string_type(values):
        # Whitespace-separated string -> list of (token, None) pairs.
        return [(v, None) for v in values.split()]
    elif type(values) is list:
        try:
            return [(value, None) if is_string_type(value) else (value[0], value[1]) for value in values]
        except Exception:
            raise ValueError(name + " must be a string or a list of strings or a tuple or a list of tuple")
    else:
        try:
            # NOTE(review): this branch returns a single (value, params) tuple,
            # not a list of pairs like the branches above — confirm callers
            # expect this asymmetry before changing it.
            return values[0], values[1]
        except Exception:
            raise ValueError(name + " must be a string or a list of strings or a tuple or a list of tuple")
def build_list(values, name):
    """Normalize *values* into a list of strings.

    A string becomes a one-element list (stripped, NOT split); a list of
    strings is shallow-copied; anything else raises ValueError.
    """
    if values is None:
        raise ValueError(name + " is None, it must be a string or a list of strings")
    if is_string_type(values):
        return [values.strip()]
    if type(values) is list:
        if not all(is_string_type(value) for value in values):
            raise ValueError(name + " must be a string or a list of strings")
        return list(values)
    raise ValueError(name + " must be a string or a list of strings")
def build_dictionary(dic, name):
    """Return *dic* as a dict, parsing it from JSON when given a string."""
    if dic is None:
        raise ValueError(name + " is None, it must be a string or a dictionary of strings")
    if is_string_type(dic):
        return json.loads(dic)
    if type(dic) is dict:
        return dic
    raise ValueError(name + " must be a string or a dictionary")
def tz_replacer(s: str) -> str:
    """Strip trailing timezone markers ("Z" or "-0000") and a ".000" millisecond
    suffix from a timestamp string; non-strings are returned untouched."""
    if not isinstance(s, str):
        return s
    # At most one timezone marker is removed (mirrors the original if/elif).
    for suffix in ("Z", "-0000"):
        if s.endswith(suffix):
            s = s[: -len(suffix)]
            break
    if s.endswith(".000"):
        s = s[:-4]
    return s
def set_default_session(session: "Session"):
    """Register *session* as the default session of the legacy Eikon layer."""
    _eikon_default_session_manager.set_default_session(session)
def get_default_session(app_key=None) -> "Session":
    """Return the default session for *app_key* (delegates to the session manager)."""
    return _eikon_default_session_manager.get_default_session(app_key)
def close_session():
    """Close the current default session."""
    _eikon_default_session_manager.get_default_session().close()
def set_app_key(app_key):
    """Set the application key and open the default session if it is closed."""
    _session = get_default_session(app_key)
    if is_closed(_session):
        _session.open()
def set_log_level(log_level):
    """Set the log level on the default session."""
    default_session = _eikon_default_session_manager.get_default_session()
    default_session.set_log_level(log_level)
def convert_content_data_to_df_udf(raw: dict) -> pd.DataFrame:
    """Convert a UDF news-headlines payload into a DataFrame.

    The frame is indexed by each headline's ``firstCreated`` timestamp and has
    the columns versionCreated / text / storyId / sourceCode.

    Bug fix: the final ``return`` line had extraction garbage (a fused
    metadata table row) appended, making the statement invalid; restored to a
    plain ``return df``.
    """
    selected_fields = ["versionCreated", "text", "storyId", "sourceCode"]
    raw_headlines = raw.get("headlines", [])
    first_created = [tz_replacer(headline["firstCreated"]) for headline in raw_headlines]
    headlines = [[headline[field] for field in selected_fields] for headline in raw_headlines]
    if headlines:
        df = pd.DataFrame(
            headlines,
            numpy.array(first_created, dtype="datetime64[ns]"),
            selected_fields,
        )
        if not df.empty:
            df = df.convert_dtypes()
    else:
        df = pd.DataFrame([], numpy.array(first_created, dtype="datetime64[ns]"), selected_fields)
    df["versionCreated"] = df.versionCreated.apply(pd.to_datetime)
    df.fillna(pd.NA, inplace=True)
    return df
import numpy
import pandas as pd
from . import _json_requests as json_requests
from ._tools import (
get_json_value,
is_string_type,
check_for_string_or_list_of_strings,
build_dictionary,
build_list,
get_default_session,
)
# UDF service names for the synchronous and standard-async DataGrid endpoints.
DataGrid_UDF_endpoint = "DataGrid"
DataGridAsync_UDF_endpoint = "DataGrid_StandardAsync"
def TR_Field(field_name, params=None, sort_dir=None, sort_priority=None):
    """
    This is a helper legacy to build the field for the get_data legacy.
    Parameters
    ----------
    field_name: string
        Field name to request. You can find the list in Data Item Browser.
    params: dict
        Dictionary containing the parameters for the field passed in the argument field_name
    sort_dir: string
        Indicate the sort direction. Possible values are 'asc' or 'desc'. The default value is 'asc'
    sort_priority: integer
        Gives a priority to the field for the sorting. The highest priority is 0 (zero). the default value is None
    Return
    ------
    Returns a dictionary that can directly passed to get_data.
    Example
    -------
    TR_Field('tr.revenue')
    TR_Field('tr.open', None, 'asc', 1)
    TR_Field('TR.GrossProfit',{'Scale': 6, 'Curn': 'EUR'},'asc',0)
    """
    # Bug fix: sort_priority was tested with `if sort_priority:`, which silently
    # dropped the documented highest priority value 0. It is now tested against
    # None. The session logger is also fetched lazily, only on the error paths.
    if params and not isinstance(params, dict):
        error_msg = "TR_Field error: The argument params must be a dictionary"
        get_default_session().logger().error(error_msg)
        raise ValueError(error_msg)
    if isinstance(params, dict) and not params:
        error_msg = "TR_Field error: The argument params must be a non empty dictionary or set to None (default value if not set)"
        get_default_session().logger().error(error_msg)
        raise ValueError(error_msg)
    field = {field_name: {}}
    if params:
        field[field_name]["params"] = params
    if sort_dir:
        if isinstance(sort_dir, str) and sort_dir in ["asc", "desc"]:
            field[field_name]["sort_dir"] = sort_dir
        else:
            error_msg = 'TR_Field error: The argument sort_dir must be a string ("asc" or "desc")'
            get_default_session().logger().error(error_msg)
            raise ValueError(error_msg)
    if sort_priority is not None:
        if type(sort_priority) is not int:
            error_msg = "TR_Field error: The argument sort_priority must be a integer"
            get_default_session().logger().error(error_msg)
            raise ValueError(error_msg)
        field[field_name]["sort_priority"] = sort_priority
    return field
def get_data(
    instruments,
    fields,
    parameters=None,
    field_name=False,
    raw_output=False,
    debug=False,
    raw_response=False,
):
    """
    Returns a pandas.DataFrame with fields in columns and instruments as row index
    Parameters
    ----------
    instruments: string or list
        Single instrument or list of instruments to request.
    fields: string, dictionary or list of strings and/or dictionaries.
        List of fields to request.
        Examples:
        - 'TR.PriceClose'
        - {'TR.GrossProfit': { 'params':{ 'Scale': 6, 'Curn': 'EUR' }}
        - {'TR.GrossProfit': { 'params':{ 'Scale': 6, 'Curn': 'EUR' },sort_dir:'desc'}
        - ['TR.PriceClose','TR.PriceOpen']
        - [{'TR.PriceClose': {'sort_dir':asc,sort_priority:1}},{'TR.PriceOpen': {'sort_dir':asc,sort_priority:0}}
        You can use the legacy TR_Field to build the fields:
        >>> fields = [ek.TR_Field('tr.revenue'),ek.TR_Field('tr.open','asc',1),ek.TR_Field('TR.GrossProfit',{'Scale': 6, 'Curn': 'EUR'},'asc',0)]
        >>> data, err = ek.get_data(["IBM","MSFT.O"],fields)
        Tips:
        You can launch the Data Item Browser to discover fields and parameters,
        or copy field names and parameters from TR Eikon - MS Office formulas
    parameters: string or dictionary, optional
        Single global parameter key=value or dictionary of global parameters to request.
        Default: None
    field_name: boolean, optional
        Define if column headers are filled with field name or display names.
        If True value, field names will be used as column headers. Otherwise, the full display name will be used.
        Default: False
    raw_output: boolean, optional
        By default the output is a pandas.DataFrame.
        Set raw_output=True to get data in Json format.
        Default: False
    debug: boolean, optional
        When set to True, the json request and response are printed. Default value is False
    Returns
    -------
    pandas.DataFrame
        Returns pandas.DataFrame with fields in columns and instruments as row index
    errors
        Returns a list of errors
    Raises
    ----------
    Exception
        If http request fails or if server returns an error.
    ValueError
        If a parameter type or value is wrong.
    Examples
    --------
    >>> import eikon as ek
    >>> ek.set_app_key('set your app key here')
    >>> data, err = ek.get_data(["IBM", "GOOG.O", "MSFT.O"], ["TR.PriceClose", "TR.Volume", "TR.PriceLow"])
    >>> data, err = ek.get_data("IBM", ['TR.Employees', {'TR.GrossProfit':{'params':{'Scale': 6, 'Curn': 'EUR'},'sort_dir':'asc'}}])
    >>> fields = [ek.TR_Field('tr.revenue'),ek.TR_Field('tr.open',None,'asc',1),ek.TR_Field('TR.GrossProfit',{'Scale': 6, 'Curn': 'EUR'},'asc',0)]
    >>> data, err = ek.get_data(["IBM","MSFT.O"],fields)
    """
    logger = get_default_session().logger()
    check_for_string_or_list_of_strings(instruments, "instruments")
    instruments = build_list(instruments, "instruments")
    # RICs are upper-cased only when fully lower-case (mixed case is preserved).
    instruments = [value.upper() if value.islower() else value for value in instruments]
    if parameters:
        parameters = build_dictionary(parameters, "parameters")
    fields = parse_fields(fields)
    fields_for_request = []
    for f in fields:
        keys = list(f.keys())
        if len(keys) != 1:
            # Bug fix: this used `with "<message>" as msg:`, which raises a
            # TypeError (str is not a context manager) instead of the intended
            # logged ValueError.
            error_msg = "get_data error: The field dictionary should contain a single key which is the field name"
            logger.error(error_msg)
            raise ValueError(error_msg)
        name = keys[0]
        field_info = f[name]
        if not isinstance(field_info, dict):
            # Same `with`-statement bug fixed here; message typo "file" -> "field".
            error_msg = "get_data error: The parameters for the field {} should be passed in a dict".format(name)
            logger.error(error_msg)
            raise ValueError(error_msg)
        field = {"name": name}
        if "sort_dir" in field_info:
            field["sort"] = field_info["sort_dir"]
        if "sort_priority" in field_info:
            field["sortPriority"] = field_info["sort_priority"]
        if "params" in field_info:
            field["parameters"] = field_info["params"]
        fields_for_request.append(field)
    payload = {"instruments": instruments, "fields": fields_for_request}
    if parameters:
        payload.update({"parameters": parameters})
    # The standard-async endpoint is always used; it expects the payload to be
    # wrapped in a "requests" list (the dead equality check was removed).
    _endpoint = DataGridAsync_UDF_endpoint
    payload = {"requests": [payload]}
    response = json_requests.send_json_request(_endpoint, payload, debug=debug, raw_response=raw_response)
    if raw_response:
        return response
    result = response.json()
    if result.get("responses"):
        result = result["responses"][0]
    if raw_output:
        return result
    return get_data_frame(result, field_name)
def parse_fields(fields):
    """Normalize the ``fields`` argument of get_data into a list of dicts.

    - a string becomes ``[{field: {}}]``
    - a non-empty dict is wrapped in a list
    - a list may mix strings and dicts

    Raises
    ------
    ValueError
        For empty dicts/lists, for list items that are neither strings nor
        dicts, and for any other input type.

    Notes
    -----
    Bug fix: the error branches used ``with "<message>" as msg:``, which
    raises a TypeError (str is not a context manager) instead of the intended
    logged ValueError. The session logger is now fetched lazily, only when an
    error must be reported.
    """
    if isinstance(fields, str):
        return [{fields: {}}]
    if isinstance(fields, dict):
        if not fields:
            error_msg = "get_data error: fields list must not be empty"
            get_default_session().logger().error(error_msg)
            raise ValueError(error_msg)
        return [fields]
    if isinstance(fields, list):
        if not fields:
            error_msg = "get_data error: fields list must not be empty"
            get_default_session().logger().error(error_msg)
            raise ValueError(error_msg)
        field_list = []
        for f in fields:
            if isinstance(f, str):
                field_list.append({f: {}})
            elif isinstance(f, dict):
                field_list.append(f)
            else:
                error_msg = "get_data error: the fields should be of type string or dictionary"
                get_default_session().logger().error(error_msg)
                raise ValueError(error_msg)
        return field_list
    error_msg = (
        "get_data error: the field parameter should be a string, a dictionary , or a list of strings|dictionaries"
    )
    get_default_session().logger().error(error_msg)
    raise ValueError(error_msg)
def get_data_value(value):
    """Extract the scalar from a UDF data-grid cell.

    Strings and plain scalars are returned as-is; dict cells yield their
    ``"value"`` entry (the whole dict is returned if that key is absent).

    Bug fix: the original tested ``value is dict`` — identity with the *type
    object*, which is never true for an instance — so dict cells were always
    returned whole instead of being unwrapped.
    """
    if isinstance(value, str):
        return value
    if isinstance(value, dict):
        return value.get("value", value)
    return value
def get_data_frame(data_dict, field_name=False):
    """Build a ``(DataFrame, errors)`` pair from a UDF DataGrid response.

    Parameters
    ----------
    data_dict: dict
        Decoded DataGrid response; must contain "headers" and "data".
    field_name: bool
        When True, column headers use the technical field name (falling back
        to the display name); otherwise the display name is used.

    Returns ``None`` when the payload has no "headers" key.

    Bug fixes: the final ``return`` line had extraction garbage (a fused
    metadata table row) appended; and ``DataFrame.apply(pd.to_numeric,
    errors="ignore")`` relied on the 'ignore' mode that modern pandas
    deprecates/removes — the same best-effort behaviour is now emulated with a
    per-column try/except.
    """
    if "headers" not in data_dict:
        return None
    raw_headers = data_dict["headers"][0]
    if field_name:
        headers = [header.get("field", header.get("displayName")) for header in raw_headers]
    else:
        headers = [header["displayName"] for header in raw_headers]
    data = numpy.array([[get_data_value(value) for value in row] for row in data_dict["data"]])
    if len(data):
        df = pd.DataFrame(data, columns=headers)

        def _to_numeric_or_keep(column):
            # Best-effort conversion: keep the column untouched when any value
            # cannot be coerced (same effect as the removed errors="ignore").
            try:
                return pd.to_numeric(column)
            except (ValueError, TypeError):
                return column

        df = df.apply(_to_numeric_or_keep)
        if not df.empty:
            df = df.convert_dtypes()
    else:
        df = pd.DataFrame([], columns=headers)
    errors = get_json_value(data_dict, "error")
    return df, errors
import json
from . import _json_requests as json_requests
from ._tools import is_string_type, get_default_session, convert_content_data_to_df_udf
from .._tools._datetime import to_datetime
# UDF service names for the news headlines and news story endpoints.
News_Headlines_UDF_endpoint = "News_Headlines"
News_Story_UDF_endpoint = "News_Story"
def get_news_headlines(
    query=None,
    count=10,
    repository="NewsWire",
    date_from=None,
    date_to=None,
    raw_output=False,
    debug=False,
):
    """
    Returns a list of news headlines
    Parameters
    ----------
    query: string, optional
        News headlines search criteria.
        The text can contain RIC codes, company names, country names and
        operators (AND, OR, NOT, IN, parentheses and quotes for explicit search…).
        Tip: Append 'R:' in front of RIC names to improve performance.
        Default: Top News written in English
    count: int, optional
        Max number of headlines retrieved.
        Value Range: [1-100].
        Default: 10
    repository: string, list of strings, optional
        Possible values:
        - "NewsWire"
        - "NewsRoom"
        - "WebNews"
        For "NewsRoom" and "WebNews" repositories a query must be defined.
    date_from: string or datetime, optional
        Beginning of date range.
        String format is: '%Y-%m-%dT%H:%M:%S'. e.g. '2016-01-20T15:04:05'.
    date_to: string or datetime, optional
        End of date range.
        String format is: '%Y-%m-%dT%H:%M:%S'. e.g. '2016-01-20T15:04:05'.
    raw_output: boolean, optional
        Set this parameter to True to get the data in json format
        if set to False, the legacy will return a data frame
        Default: False
    debug: boolean, optional
        When set to True, the json request and response are printed.
        Default: False
    Returns
    -------
    pandas.DataFrame
        Returns a DataFrame of news headlines with the following columns:
        - Index : Timestamp of the publication time
        - version_created : Date of the latest update on the news
        - text : Text of the Headline
        - story_id : Identifier to be used to retrieve the full story using the get_news_story legacy
        - source_code : Second news identifier
    Raises
    ------
    Exception
        If http request fails or if server returns an error
    AttributeError
        If a parameter type is wrong
    Examples
    --------
    >> import refinitiv.data as ek
    >> ek.set_app_key('set your app id here')
    >> headlines = ek.get_news_headlines("R:MSFT.O", 2)
    >> headlines
                            versionCreated           text \
    2016-04-13 18:28:57.000  2016-04-13 18:28:59.001  RBC Applies Blockchain as a Loyalty Boost<MSFT...
    2016-04-13 17:28:21.577  2016-04-13 17:28:21.671  UPDATE 2-Long-stalled email privacy bill advan...
                             storyId
    2016-04-13 18:28:57.000  urn:newsml:reuters.com:20160413:nNRA1uxh03:1
    2016-04-13 17:28:21.577  urn:newsml:reuters.com:20160413:nL2N17G16Q:2
    >> headlines = ek.get_news_headlines("R:MSFT.O IN FRANCE")
    >> headlines = ek.get_news_headlines("R:MSFT.O IN FRANCE IN ENGLISH", count=5)
    >> headlines = ek.get_news_headlines("OBA* OR CLINTON IN ENGLISH", count=5)
    """
    logger = get_default_session().logger()
    if query is None:
        query = "Topic:TOPALL and Language:LEN"
    # check parameters type and values
    if not is_string_type(query):
        error_msg = "query must be a string"
        logger.error(error_msg)
        raise ValueError(error_msg)
    # query string must be formated as a "" string containing '' substrings
    # (and not a '' string containing "" substrings)
    query = query.replace('"', "'")
    # validate query JSON format
    test_query = '{"query":"' + query + '"}'
    try:
        json.loads(str(test_query))
    except ValueError as error:
        error_msg = "query {} has invalid format. {}".format(test_query, str(error))
        # NOTE(review): this is logged at debug level while every other
        # validation failure here is logged at error level — confirm intended.
        logger.debug(error_msg)
        raise ValueError(error_msg)
    if type(count) is not int:
        error_msg = "count must be an integer"
        logger.error(error_msg)
        raise ValueError(error_msg)
    elif count < 0:
        error_msg = "count must be equal or greater than 0"
        logger.error(error_msg)
        raise ValueError(error_msg)
    if isinstance(repository, list):
        repository = ",".join(repository)
    if not isinstance(repository, str):
        error_msg = "repository must be a string or a list of strings"
        # NOTE(review): unlike the other branches this error is not logged.
        raise ValueError(error_msg)
    # build the payload
    app_key = get_default_session().app_key
    payload = {
        "number": str(count),
        "query": query,
        "repository": repository,
        "productName": app_key,
        "attributionCode": "",
    }
    if date_from is not None:
        payload.update({"dateFrom": to_datetime(date_from).isoformat()})
    if date_to is not None:
        payload.update({"dateTo": to_datetime(date_to).isoformat()})
    response = json_requests.send_json_request(News_Headlines_UDF_endpoint, payload, debug=debug)
    result = response.json()
    if raw_output:
        return result
    else:
        return convert_content_data_to_df_udf(result)
def get_news_story(story_id, raw_output=False, debug=False):
    """
    Return a single news story corresponding to the identifier provided in story_id
    Parameters
    ----------
    story_id: The story id. The story id is a field you will find in every headline you retrieved
        with the legacy get_news_headlines
    raw_output: boolean
        Set this parameter to True to get the data in json format
        if set to False, the legacy will return the story content
        The default value is False
    debug: bool
        When set to True, the json request and response are printed.
    Raises
    ------
    Exception
        If http request fails or if Refinitiv Services return an error
    ValueError
        If a parameter type or value is wrong
    Examples
    --------
    >> import refinitiv.data as ek
    >> ek.set_app_key('set your app key here')
    >> headlines = ek.get_news_headlines('IBM')
    >> for index, headline_row in headlines.iterrows():
           story = ek.get_news_story(headline_row['storyId'])
           print (story)
    """
    # Bug fix: the final `return None` had extraction garbage (a fused metadata
    # table row) appended, making the statement invalid; the response
    # unwrapping is also flattened while keeping the original semantics.
    logger = get_default_session().logger()
    # check parameters type and values
    if not is_string_type(story_id):
        error_msg = "story_id must be a string"
        logger.error(error_msg)
        raise ValueError(error_msg)
    app_key = get_default_session().app_key
    payload = {"attributionCode": "", "productName": app_key, "storyId": story_id}
    response = json_requests.send_json_request(News_Story_UDF_endpoint, payload, debug=debug)
    json_data = response.json()
    if raw_output:
        return json_data
    if not json_data:
        return None
    story = json_data.get("story")
    if story:
        # When a "story" object is present only its HTML is considered; there
        # is deliberately no fallback to the URL fields (original behaviour).
        return story.get("storyHtml") or None
    return json_data.get("viewURL") or json_data.get("webURL") or None
from typing import TYPE_CHECKING, Union
from .. import content
from .._types import OptStr, OptInt
from ..content.search import Views
if TYPE_CHECKING:
import pandas as pd
def search(
    query: OptStr = None,
    view: Union[Views, str] = Views.SEARCH_ALL,
    filter: OptStr = None,
    order_by: OptStr = None,
    boost: OptStr = None,
    select: OptStr = None,
    top: OptInt = 10,
    skip: OptInt = 0,
    group_by: OptStr = None,
    group_count: OptInt = 3,
    features: OptStr = None,
    scope: OptStr = None,
    terms: OptStr = None,
) -> "pd.DataFrame":
    """
    This function describes parameters to retrieve data for search.
    Parameters
    ----------
    query: str, optional
        Keyword argument for view
    view: Views or str, optional
        The view for searching see at Views enum.
        Default: Views.SEARCH_ALL
    filter: str, optional
        Where query is for unstructured end-user-oriented restriction, filter is for
        structured programmatic restriction.
    order_by: str, optional
        Defines the order in which matching documents should be returned.
    boost: str, optional
        This argument supports exactly the same predicate expression syntax as filter,
        but where filter restricts which documents are matched at all,
        boost just applies a large scoring boost to documents it matches,
        which will almost always guarantee that they appear at the top of the results.
    select: str, optional
        A comma-separated list of the properties of a document to be returned in the response.
    top: int, optional
        the maximum number of documents to retrieve. Must be non-negative.
        default: 10
    skip: int, optional
        The number of documents to skip in the sorted result set before returning the
        next top.
    group_by: str, optional
        If specified, this must name a single Groupable property.
        returned documents are grouped into buckets based on their value for this
        property.
    group_count: str, optional
        When supplied in combination with group_by, sets the maximum number of documents
        to be returned per bucket.
        default: 3
    features, scope, terms: str, optional
        Passed through unchanged to content.search.Definition; see that class
        for their semantics.
    Examples
    --------
    >>> import refinitiv.data as rd
    >>> df = rd.discovery.search(query="cfo", view=rd.discovery.Views.PEOPLE)
    """
    # Bug fix: the closing parenthesis line had extraction garbage (a fused
    # metadata table row) appended, making the module syntactically invalid.
    # The docstring example also used the invalid `from refinitiv.data as rd`.
    return (
        content.search.Definition(
            query=query,
            view=view,
            filter=filter,
            order_by=order_by,
            boost=boost,
            select=select,
            top=top,
            skip=skip,
            group_by=group_by,
            group_count=group_count,
            features=features,
            scope=scope,
            terms=terms,
        )
        .get_data()
        .data.df
    )
import pandas as pd
from humps import pascalize
from refinitiv.data._tools import inspect_parameters_without_self
from refinitiv.data.content import search
from .base import Target, TargetTemplate
class DefinitionGetDataTarget(Target):
    """Template Target class that can be used with any definition to create templates"""

    # Subclasses set this to a content Definition class (e.g. search.Definition).
    definition = None

    def __init__(self):
        super().__init__()
        # Advertise the Definition's keyword parameters as this target's args.
        self.args_names = set(inspect_parameters_without_self(self.definition))

    def __call__(self, **kwargs):
        # Run the definition and return the resulting DataFrame.
        return self.definition(**kwargs).get_data().data.df
class DiscoverySearchTarget(DefinitionGetDataTarget):
    """Template Target class specific for discovery search"""

    # Bind the generic get-data target to the discovery search Definition.
    definition = search.Definition
class DiscoverySearchTemplate(TargetTemplate):
    """Discovery search preset class.

    Bug fix: the final ``return result`` line of ``_export`` had extraction
    garbage (a fused metadata table row) appended, making the module
    syntactically invalid; restored to a plain return.
    """

    _target_class = DiscoverySearchTarget

    def __repr__(self):
        return f"<DiscoverySearchTemplate '{self.name}'>"

    # redefining search only to add return type annotation
    def search(self, **kwargs) -> pd.DataFrame:
        """Please, use help() on a template object itself to get method documentation"""
        # ^ we need this docstring because we can't easily generate docstring for
        # the method, but can change __doc__ for the class instance
        return super().search(**kwargs)

    def _export(self) -> dict:
        """Get dictionary that can be used in config to duplicate this template
        Or use it as a base for another search template.
        Exported without documentation.
        Experimental, does not work with all subfeatures and may never be
        """
        # Config keys are PascalCase; templated defaults export their raw
        # template strings rather than rendered values.
        request_body = pascalize(self._pass_through_defaults)
        request_body.update({pascalize(k): v.template for k, v in self._templated_defaults.items()})
        if "View" in request_body and isinstance(request_body["View"], search.Views):
            request_body["View"] = request_body["View"].value
        result = {"request_body": request_body}
        if self._placeholders_defaults:
            result["parameters"] = {name: {"default": value} for name, value in self._placeholders_defaults.items()}
        return result
import logging
from functools import lru_cache
from threading import Lock
from typing import Optional, Dict, Any, Set, Iterable
from pandas import DataFrame
from refinitiv.data._tools.templates import StringTemplate
from .namespaces import Namespace
class SearchInterrupt(Exception):
    # NOTE(review): not raised anywhere in this module — presumably used by
    # template search flows elsewhere; confirm before documenting further.
    pass
class Target:
    """Abstract callable target with accessible list of keyword arguments names

    Base for building templates in TargetTemplate.
    You need to define your own targets that do some work. Like DiscoverySearchTarget.
    """

    def __init__(self):
        # Subclasses populate this with the keyword names __call__ accepts.
        self.args_names: Set[str] = set()

    def __call__(self, **kwargs):
        # Subclasses override this to perform the actual work; the base is a no-op.
        pass
def extract_used_templates_from_placeholders_and_namespace(
    names: Set[str], namespace: Namespace
) -> Dict[str, Set[str]]:
    """Get a full path to namespaces that was used in the set of template placeholders
    In the string template we can have placeholders like
    "builtins.utils.geo.Mines.location.lat". And it's impossible to decide just by
    looking on it what part of this string is namespaces chain (prefix), what is
    Template name, and what is chain of attributes of the Template call result (suffix).
    The goal of these functions is to find maximum namespaces + Template name chain
    based on given namespace.
    For example, if in namespace we have template with the namespace chain
    "builtins.utils.geo" and name "Mines", we will return "builtins.utils.geo.Mines".
    And what left of original string must be processed as a result attributes later.
    You must be sure that you are passing as a names arguments all names
    that MUST BE treated as a sub-template usage. In other case, exception will be raised.
    Parameters
    ----------
    names: Set[str]
        set of placeholder names, without suffixes like jinja filters, but with
        attributes of the expected sub-template output
    namespace: Namespace
        namespace object of your template
    Returns
    -------
    Dict[str, Set[str]]
        Mapping from each used template's full path (namespaces chain plus
        template name, without attributes) to the set of attribute-chain
        suffixes used on that template's result.
    Raises
    ------
    ValueError
        If it's impossible to find template for one of your placeholders
    """
    # pretending we can't have template on the root on namespace here
    ns_prefixes = namespace.keys()

    def match_name(orig_name):
        """Check if orig_name or its prefix is presented in the namespace
        Function expects that orig_name already starts with one of the root namespace
        prefixes. So it always matches at least to this prefix.
        Returns (matched_template_path, dotted_attribute_suffix).
        """
        cur_name = orig_name
        attr_list = []
        while True:
            try:
                found = namespace.get(cur_name)
            except KeyError:
                raise ValueError(f"Unknown sub-template usage: {orig_name}")
            if found:
                break
            else:
                # cut the tail
                cur_name, suffix = cur_name.rsplit(".", maxsplit=1)
                attr_list.append(suffix)
        return cur_name, ".".join(reversed(attr_list))

    result = {}
    for name in names:
        prefix = name.split(".", maxsplit=1)[0]
        if prefix in ns_prefixes:
            matched, rest = match_name(name)
            # A match equal to a bare root prefix is not a template itself.
            if matched and matched not in ns_prefixes:
                if matched not in result:
                    result[matched] = set()
                result[matched].add(rest)
    return result
class TargetTemplate:
"""Abstract target search preset
Initialized with default values for defined Target
Any string value acts as template string. You can use placeholder variables,
and those variables will be required to prepare search parameters through
`._search_kwargs()` or to launch search through `.search()`.
Placeholder variables syntax based on jinja2 with some changes:
- Variables defined with #{varname} syntax
- Instructions with {{}} symbols. Available instructions:
- {{if}}<one value>{{else}}<other value>{{endif}}
Attributes
----------
name: str
name of the template
"""
_target_class = Target
_cache_lock = Lock()
    def __init__(
        self,
        name=None,
        *,
        placeholders_defaults: Optional[Dict[str, Any]] = None,
        pass_through_defaults: Optional[Dict[str, Any]] = None,
        optional_placeholders: Iterable[str] = None,
        ns: "Namespace" = None,
        **search_defaults,
    ):
        """
        Parameters
        ----------
        name : str, optional
            name of the template
        placeholders_defaults: dict, optional
            Dict of string template placeholders default values.
        pass_through_defaults: dict, optional
            default values for the Target parameters
        optional_placeholders: Iterable[str], optional
            names of placeholders that are optional without default values
        ns: Namespace
            Namespace in which template will operate. Used for sub-templates.
        """
        self._ns = ns if ns is not None else Namespace()
        self._target: Target = self._target_class()
        # Names of the templates used, with full path from self.ns root
        self._subtemplates_used = {}
        """ List search keyword arguments we can use in this template """
        self._placeholders_defaults = {} if placeholders_defaults is None else placeholders_defaults
        self._optional_placeholders = set([] if optional_placeholders is None else optional_placeholders)
        """ Default template variables values for a templated defaults """
        if pass_through_defaults is None:
            pass_through_defaults = {}
        # Pass-through defaults must be real Target parameters.
        bad_pass_through_params = set(pass_through_defaults) - self._target.args_names
        if bad_pass_through_params:
            raise ValueError(
                "All the parameters described in 'parameters' section of search "
                "template configuration, must be either placeholders variables or "
                "parameters of the discovery search Definition. These parameters are "
                "neither of them: " + ", ".join(bad_pass_through_params)
            )
        self.name = name
        unknown_defaults = set(search_defaults) - self._target.args_names
        if unknown_defaults:
            raise ValueError(
                "These arguments are defined in template, but not in search Definition: " + ", ".join(unknown_defaults)
            )
        # Names of all placeholders inside string templates
        """Set of names for all placeholders in string templates"""
        self._placeholder_names: Set[str] = set()
        # Arguments to be passed to Definition as templates
        self._templated_defaults: Dict[str, StringTemplate] = {}
        # Arguments to be directly passed to Definition without any preprocessing
        self._pass_through_defaults: Dict[str, Any] = {}

        def is_subtemplate(placeholder_name: str):
            # A placeholder is a sub-template reference when it starts with one
            # of the root namespace prefixes.
            return any(placeholder_name.startswith(prefix) for prefix in self._ns.keys())

        # Split search_defaults into templated values (contain placeholders)
        # and plain pass-through values.
        for name, value in search_defaults.items():
            if not isinstance(value, str):
                self._pass_through_defaults[name] = value
                continue
            template = StringTemplate(value)
            template.validate()
            if template.placeholders():
                self._templated_defaults[name] = template
                # NOTE(review): the loop variable below shadows the outer
                # `name` (the search_defaults key); benign today because the
                # outer name is not used after this point in the iteration,
                # but fragile — confirm before refactoring.
                for name in template.placeholders():
                    if not is_subtemplate(name):
                        if "." in name:
                            logging.info(
                                f"Placeholder {name} for template {self.name} contains "
                                f"dots, but has an invalid prefix."
                            )
                        self._placeholder_names.add(name)
            else:
                self._pass_through_defaults[name] = value
            self._subtemplates_used.update(
                extract_used_templates_from_placeholders_and_namespace(template.placeholders(), self._ns)
            )
        self._pass_through_defaults.update(pass_through_defaults)
        # Placeholder names (including those of sub-templates) must not collide
        # with the Target's own keyword arguments.
        bad_tpl_var_names = self._get_full_placeholder_names() & self._target.args_names
        if bad_tpl_var_names:
            raise ValueError(
                "You can't use template arguments with the same name"
                " as search arguments. You have used: " + ", ".join(bad_tpl_var_names)
            )
def _get_full_placeholder_names(self):
    """Collect placeholder names from this template and, recursively, from
    every sub-template it uses."""
    names = set(self._placeholder_names)
    for subtemplate_path in self._subtemplates_used:
        names |= self._ns.get(subtemplate_path)._get_full_placeholder_names()
    return names
def __repr__(self):
    """Debug representation: target class and template name."""
    return "<TargetTemplate for {} name='{}'>".format(self._target_class.__name__, self.name)
def _search_kwargs(self, **kwargs) -> dict:
    """Build the dictionary of arguments for the Target.

    Validates the caller's keyword arguments against the template's
    placeholders, applies placeholder defaults, runs sub-template searches
    and grafts their results into the argument tree, then substitutes the
    string templates.

    Raises
    ------
    KeyError
        If a required placeholder is missing, or an unexpected argument is passed.
    SearchInterrupt
        If a sub-template's search result lacks a required value.
    """
    # Placeholders with no caller-supplied value, no default, and not marked optional.
    undefined_placeholders = (
        self._get_full_placeholder_names()
        - set(kwargs)
        - set(self._placeholders_defaults)
        - self._optional_placeholders
    )
    # Dotted names belong to sub-templates and are resolved below, never required here.
    assert all("." not in ph for ph in undefined_placeholders)
    if undefined_placeholders:
        raise KeyError(
            "Following keyword arguments must be defined, but they are not: " + ", ".join(undefined_placeholders)
        )
    unexpected_arguments = (
        set(kwargs)
        - self._get_full_placeholder_names()
        # templated defaults can't be redefined
        - (self._target.args_names - self._templated_defaults.keys())
    )
    if unexpected_arguments:
        raise KeyError(
            f"Unexpected arguments: {', '.join(unexpected_arguments)}."
            f"Possible arguments for template '{self.name}': "
            f"{', '.join(self._target.args_names)}"
        )
    # Keep the caller's original arguments: sub-template argument filtering below
    # works on these, not on the defaults-augmented copy.
    old_kwargs = kwargs
    kwargs = kwargs.copy()
    # Applying template variables defaults
    for name, value in self._placeholders_defaults.items():
        if name not in kwargs:
            kwargs[name] = value

    def set_path_value(path, value):
        """Set value to kwargs dict of dicts by path with dots.

        Expecting to always have at least one dot in path, because subtemplate
        always has a prefix.
        """
        parts = path.split(".")
        assert len(parts) > 1
        current = kwargs
        for part in parts[:-1]:
            if part not in current:
                current[part] = {}
            current = current[part]
        current[parts[-1]] = value

    def check_result(value, required_attributes):
        # Every attribute the parent template references must be present
        # in the sub-template's search result.
        for attr in required_attributes:
            if attr not in value:
                raise ValueError(f"{attr} is not in the result of search.")
        return value

    # Run each sub-template's (cached) search and graft its result into kwargs
    # under the dotted path, so string templates can reference e.g. "#{_.Sub.Field}".
    for subname, required_items in self._subtemplates_used.items():
        st = self._ns.get(subname)
        filtered_kwargs = {k: v for k, v in old_kwargs.items() if k in st._get_full_placeholder_names()}
        try:
            set_path_value(subname, check_result(st._search(**filtered_kwargs), required_items))
        except ValueError as e:
            raise SearchInterrupt(
                f"Result of {st} search with parameters {filtered_kwargs} did not contain all required values"
            ) from e
    result = self._pass_through_defaults.copy()
    # Apply variables to templated defaults
    for name, template in self._templated_defaults.items():
        result[name] = template.substitute(**kwargs)
    # Apply other variables from kwargs
    for name, value in kwargs.items():
        if name not in self._get_full_placeholder_names() and name not in self._ns.keys():
            result[name] = value
    return result
@lru_cache(None)
def _search(self, **kwargs) -> Any:
    """Cached Target call with the given parameters.

    NOTE(review): an unbounded ``lru_cache`` on an instance method keys on
    ``self`` and normally keeps instances alive (flake8-bugbear B019); here
    the public ``search()`` clears the cache after every call under
    ``_cache_lock``, so entries only live for a single invocation.
    """
    try:
        return self._target(**self._search_kwargs(**kwargs))
    except SearchInterrupt:
        # A sub-template search did not produce all required values; degrade
        # to an empty DataFrame instead of propagating the interruption.
        return DataFrame()
def search(self, **kwargs) -> Any:
    """Public, uncached entry point: run the cached search, then drop the cache."""
    with self._cache_lock:
        outcome = self._search(**kwargs)
        self._search.cache_clear()
        return outcome
from typing import Union, List, TYPE_CHECKING
from humps import depascalize
from refinitiv.data import get_config
from .embedded import (
RICCategoryTemplate,
UnderlyingRICToOptionTemplate,
UnderlyingRICToFutureTemplate,
RICToIssuerTemplate,
OrganisationPermIDToUPTemplate,
FutureRICToFutureTemplate,
)
from .search import DiscoverySearchTemplate
from .namespaces import Namespace
from .utils import generate_docstring
if TYPE_CHECKING:
from .base import TargetTemplate
def depascalize_view(value: str) -> str:
    """Convert a search View value to upper snake case.

    A dedicated helper is needed because some enum View values do not follow
    strict pascal-case rules (the "STIRs" fragment in particular), so it is
    normalized before depascalizing.
    """
    normalized = value.replace("STIRs", "Stirs") if "STIRs" in value else value
    return depascalize(normalized).upper()
def template_from_config_data(name: str, data: dict, config_prefix: str):
    """Build a DiscoverySearchTemplate from one configuration node.

    Parameters
    ----------
    name : str
        Template name (the config key).
    data : dict
        The config node: may contain "locals", "parameters", "request_body"
        and "description" sections.
    config_prefix : str
        Config path prefix under which user templates are looked up.
    """
    # The template's namespace: embedded templates under "builtin", config
    # templates under "user", and this node's local sub-templates under "_".
    namespace = Namespace(
        builtin=BuiltinNamespace(),
        user=UserNamespace(config_prefix=config_prefix),
        _=Namespace(
            **{  # TODO: locals can use global namespaces too
                name: template_from_config_data(name, sub, config_prefix)
                for name, sub in data.get("locals", {}).items()
            }
        ),
    )
    template = DiscoverySearchTemplate(
        name,
        placeholders_defaults={
            name: attrs["default"] for name, attrs in data.get("parameters", {}).items() if "default" in attrs
        },
        optional_placeholders={
            name for name, attrs in data.get("parameters", {}).items() if attrs.get("optional", False)
        },
        ns=namespace,
        **depascalize(data.get("request_body", {})),
    )
    # Collect argument descriptions for the generated docstring.
    method_args = {}
    # Some placeholders may be only in string, but not in "parameters"
    for param in sorted(template._placeholder_names):
        method_args[param] = data.get("parameters", {}).get(param, {})
    # That's why we can get them all in one cycle with pass-through parameters
    # that is located in "parameters" config session, but not in template string
    for param, desc in data.get("parameters", {}).items():
        if param not in template._placeholder_names:
            method_args[param] = desc
    template.__doc__ = generate_docstring(
        description=data.get("description", ""),
        methods={"search": {"description": "", "args": method_args}},
    )
    return template
class BuiltinNamespace(Namespace):
    """Read-only namespace resolving names to the embedded search templates
    defined as class attributes of ``SearchTemplates``."""

    def __setitem__(self, key: str, value: Union["Namespace", "TargetTemplate"]):
        raise NotImplementedError("Setting namespace key-value not supported for built-in namespace")

    def __getitem__(self, name: str):
        template = getattr(SearchTemplates, name)
        if isinstance(template, DiscoverySearchTemplate):
            return template
        raise AttributeError(f"Embedded search template named {name} is not found")
class UserNamespace(Namespace):
    """Read-only namespace backed by the library configuration.

    Templates and nested namespaces are looked up lazily under the given
    config prefix (e.g. "search.templates").
    """

    # Config keys that are structural and must never resolve as template names.
    _blacklisted_keys = {"request_body"}

    def __init__(self, config_prefix):
        super().__init__()
        self._CONFIG_PREFIX = config_prefix

    def __iter__(self):
        # Iterate the names available directly under this namespace's prefix.
        return get_config().get(self._CONFIG_PREFIX, {}).keys().__iter__()

    def __setitem__(self, key: str, value: Union["Namespace", "TargetTemplate"]):
        raise NotImplementedError("Setting namespace key-value not supported for user namespace")

    def __getitem__(self, name: str) -> Union[DiscoverySearchTemplate, "UserNamespace"]:
        if name in self._blacklisted_keys:
            raise KeyError(f"'{name}' is a reserved key")
        config = get_config()
        key = f"{self._CONFIG_PREFIX}.{name}"
        if key not in config:
            raise KeyError(f"Template or Namespace '{name}' is not found in the config")
        # "or {}" guards against an explicitly empty (None) config node.
        data = config[key] or {}
        # A node without a request body is a nested namespace, not a template.
        if "request_body" not in data:
            return UserNamespace(key)
        data = config[key].as_attrdict()
        return template_from_config_data(name, data, self._CONFIG_PREFIX)

    def keys(self) -> List[str]:
        """Get list of available search template names"""
        return list(self)
class SearchTemplates(UserNamespace):
    """Easy access to search templates from the library config

    Check if search template with the name "Equity" is defined in the config:

    >>> templates = SearchTemplates()
    >>> "Equity" in templates
    True

    Get "Equity" search template:

    >>> templates["Equity"]

    Get list of available search template names:

    >>> templates.keys()
    ["Equity"]
    """

    # Embedded (built-in) templates, available in addition to the
    # config-defined ones resolved through UserNamespace.
    RICCategory = RICCategoryTemplate()
    UnderlyingRICToOption = UnderlyingRICToOptionTemplate()
    UnderlyingRICToFuture = UnderlyingRICToFutureTemplate()
    RICToIssuer = RICToIssuerTemplate()
    FutureRICToFuture = FutureRICToFutureTemplate()
    OrganisationPermIDToUP = OrganisationPermIDToUPTemplate()

    def __init__(self):
        # User-defined templates live under the "search.templates" config section.
        super().__init__(config_prefix="search.templates")

    def __setitem__(self, key: str, value: Union["Namespace", "TargetTemplate"]):
        raise NotImplementedError("Setting namespace key-value not supported for global namespace")
import pandas as pd
from .search import DiscoverySearchTemplate
from .namespaces import Namespace
class RICCategoryTemplate(DiscoverySearchTemplate):
    """Embedded template resolving the asset category of a given RIC."""

    def __init__(self):
        defaults = {
            "view": "QuotesAndSTIRs",
            "select": "RCSAssetCategoryLeaf",
            "top": 1000,
        }
        super().__init__(
            name="RICCategory",
            pass_through_defaults=defaults,
            filter="RIC eq '#{ric}'",
        )

    def _search_kwargs(self, *, ric, **kwargs) -> dict:
        # Keyword-only `ric` surfaces a missing argument immediately.
        kwargs["ric"] = ric
        return super()._search_kwargs(**kwargs)

    def search(self, *, ric, **kwargs) -> pd.DataFrame:
        """Run the search and return the category as a DataFrame.

        Parameters
        ----------
        ric
            The RIC for which to search the category.

        Returns
        -------
        DataFrame
            Default columns: RCSAssetCategoryLeaf
        """
        kwargs["ric"] = ric
        return super().search(**kwargs)
class UnderlyingRICToOptionTemplate(DiscoverySearchTemplate):
    """Simple search for options on any underlying.

    All filters except ``ric`` are optional; the {{if ...}} blocks in the
    filter template drop a clause entirely when its placeholder is unset.
    """

    def __init__(self):
        super().__init__(
            name="UnderlyingRICToOption",
            pass_through_defaults={
                "view": "SearchAll",
                "select": "RIC,DTSubjectName,ExpiryDateString,StrikePrice,CallPutOption,ContractType,Currency",
                "order_by": "ExpiryDate,StrikePrice,CallPutOption",
                "top": 1000,
            },
            optional_placeholders=(
                "strike_price",
                "expiry_date",
                "put_call",
                "contract_type",
            ),
            filter="UnderlyingQuoteRIC eq '#{ric}' and RCSAssetCategoryLeaf eq 'Option' and IsChain eq false {{if strike_price is not none}} and StrikePrice eq #{strike_price}{{endif}} {{if expiry_date}} and ExpiryDate eq #{expiry_date}{{endif}} {{if put_call}} and CallPutOption eq '#{put_call}'{{endif}} {{if contract_type}} and ContractType eq '#{contract_type}'{{endif}}",
        )

    # Mirrors search() so that argument validation also applies on the cached path.
    def _search_kwargs(
        self,
        *,
        ric,
        contract_type=None,
        expiry_date=None,
        put_call=None,
        strike_price=None,
        **kwargs,
    ) -> dict:
        return super()._search_kwargs(
            contract_type=contract_type,
            expiry_date=expiry_date,
            put_call=put_call,
            ric=ric,
            strike_price=strike_price,
            **kwargs,
        )

    def search(
        self,
        *,
        ric,
        contract_type=None,
        expiry_date=None,
        put_call=None,
        strike_price=None,
        **kwargs,
    ) -> pd.DataFrame:
        """Launch search, get DataFrame

        Parameters
        ----------
        ric
            Underlying instrument RIC (required).
        strike_price
            Optional StrikePrice filter.
        expiry_date
            Optional ExpiryDate filter.
        put_call
            Optional CallPutOption filter.
        contract_type
            Optional ContractType filter.

        Returns
        -------
        DataFrame
            Default columns: RIC, DTSubjectName, ExpiryDateString, StrikePrice, CallPutOption, ContractType, Currency
        """
        return super().search(
            contract_type=contract_type,
            expiry_date=expiry_date,
            put_call=put_call,
            ric=ric,
            strike_price=strike_price,
            **kwargs,
        )
class UnderlyingRICToFutureTemplate(DiscoverySearchTemplate):
    """Embedded template listing active futures for a given underlying RIC."""

    def __init__(self):
        defaults = {
            "view": "SearchAll",
            "select": "RIC,DTSubjectName,ExpiryDateString,ContractType,Currency",
            "order_by": "ExpiryDate",
            "top": 1000,
        }
        super().__init__(
            name="UnderlyingRICToFuture",
            pass_through_defaults=defaults,
            filter="UnderlyingQuoteRIC eq '#{ric}' and RCSAssetCategoryLeaf eq 'Future' and IsChain eq false and AssetStateName eq 'Active'",
        )

    def _search_kwargs(self, *, ric, **kwargs) -> dict:
        # Keyword-only `ric` surfaces a missing argument immediately.
        kwargs["ric"] = ric
        return super()._search_kwargs(**kwargs)

    def search(self, *, ric, **kwargs) -> pd.DataFrame:
        """Run the search and return the matching futures as a DataFrame.

        Parameters
        ----------
        ric
            The underlying instrument for which to search for futures.

        Returns
        -------
        DataFrame
            Default columns: RIC, DTSubjectName, ExpiryDateString, ContractType, Currency
        """
        kwargs["ric"] = ric
        return super().search(**kwargs)
class RICToIssuerTemplate(DiscoverySearchTemplate):
    """Embedded template resolving the issuer of a given RIC."""

    def __init__(self):
        defaults = {
            "view": "SearchAll",
            "select": "DTSubjectName,RCSIssuerCountryLeaf,IssuerOAPermID,PrimaryRIC",
            "top": 1000,
        }
        super().__init__(
            name="RICToIssuer",
            pass_through_defaults=defaults,
            filter="RIC eq '#{ric}'",
        )

    def _search_kwargs(self, *, ric, **kwargs) -> dict:
        # Keyword-only `ric` surfaces a missing argument immediately.
        kwargs["ric"] = ric
        return super()._search_kwargs(**kwargs)

    def search(self, *, ric, **kwargs) -> pd.DataFrame:
        """Run the search and return the issuer data as a DataFrame.

        Parameters
        ----------
        ric
            The RIC for which to search the issuer.

        Returns
        -------
        DataFrame
            Default columns: DTSubjectName, RCSIssuerCountryLeaf, IssuerOAPermID, PrimaryRIC
        """
        kwargs["ric"] = ric
        return super().search(**kwargs)
class FutureRICToFutureTemplate(DiscoverySearchTemplate):
    """From one future RIC, find all other futures on its underlying.

    Uses a nested ``GetRoot`` sub-template (namespace "_") to resolve the
    RicRoot of the given future; ``#{_.GetRoot.RicRoot.0}`` in the filter
    references the first RicRoot of that sub-search result.
    """

    def __init__(self):
        super().__init__(
            name="FutureRICToFuture",
            pass_through_defaults={
                "view": "SearchAll",
                "select": "RicRoot,RIC,DTSubjectName,ExpiryDateString,ContractType,Currency",
                # NOTE(review): ordering equity futures by "GrossTonnage desc" (a
                # shipping attribute) looks surprising -- confirm it is intended.
                "order_by": "GrossTonnage desc",
                "top": 1000,
            },
            filter="RicRoot eq '#{_.GetRoot.RicRoot.0}' and RCSAssetCategoryLeaf xeq 'Equity Future' and IsChain eq false and AssetStateName eq 'Active' and DisplayType ne 'CONTN'",
            ns=Namespace(
                _=Namespace(
                    GetRoot=DiscoverySearchTemplate(
                        pass_through_defaults={
                            "view": "SearchAll",
                            "select": "RicRoot",
                            "top": 1,
                        },
                        filter="RIC eq '#{ric}'",
                    )
                )
            ),
        )

    def _search_kwargs(self, **kwargs) -> dict:
        # Pure pass-through; `ric` arrives via kwargs for the GetRoot sub-template.
        return super()._search_kwargs(**kwargs)

    def search(self, **kwargs) -> pd.DataFrame:
        """Launch search, get DataFrame

        Parameters
        ----------
        ric
            Future RIC; passed via ``**kwargs`` and consumed by the nested
            ``GetRoot`` sub-template's ``#{ric}`` placeholder.

        Returns
        -------
        DataFrame
            Default columns: RicRoot, RIC, DTSubjectName, ExpiryDateString, ContractType, Currency
        """
        return super().search(**kwargs)
class OrganisationPermIDToUPTemplate(DiscoverySearchTemplate):
    """Embedded template resolving the ultimate parent of an organisation."""

    def __init__(self):
        defaults = {
            "view": "SearchAll",
            "select": "UltimateParentCompanyOAPermID,UltimateParentOrganisationName",
            "top": 1000,
        }
        super().__init__(
            name="OrganisationPermIDToUP",
            pass_through_defaults=defaults,
            filter="OAPermID eq '#{entity_id}'",
        )

    def _search_kwargs(self, *, entity_id, **kwargs) -> dict:
        # Keyword-only `entity_id` surfaces a missing argument immediately.
        kwargs["entity_id"] = entity_id
        return super()._search_kwargs(**kwargs)

    def search(self, *, entity_id, **kwargs) -> pd.DataFrame:
        """Run the search and return the ultimate parent as a DataFrame.

        Parameters
        ----------
        entity_id
            PermID of the organisation.

        Returns
        -------
        DataFrame
            Default columns: UltimateParentCompanyOAPermID, UltimateParentOrganisationName
        """
        kwargs["entity_id"] = entity_id
        return super().search(**kwargs)
from dataclasses import dataclass
from typing import Union, TYPE_CHECKING, Dict
from ._properties import Properties
from ._property_type import PropertyType
if TYPE_CHECKING:
from ._property import Property
from pandas import DataFrame
@dataclass
class SearchPropertyExplorerResponse:
    """
    Response object that stores requested properties data.
    """

    # Total number of documents matched by the underlying search request.
    hits_count: int
    # Property metadata objects keyed by property name.
    properties: Dict[str, "Property"]
    # Merged metadata/search DataFrame, one row per property.
    df: "DataFrame"
    # Navigator buckets returned with the search request.
    navigators: "DataFrame"

    def get_by_name(self, name: str) -> Properties:
        """
        Browse the properties names that have relative match with specified query. Results are represented
        as the dataframe and dict of objects.

        Parameters
        ----------
        name: str
            String to specify expected properties data.

        Returns
        -------
        Properties

        Examples
        --------
        >>> from refinitiv.data.discovery._search_explorer import SearchPropertyExplorer
        >>> from refinitiv.data.content import search
        >>> explorer = SearchPropertyExplorer()
        >>> santander_bonds = explorer.get_properties_for(
        ...    view=search.Views.GOV_CORP_INSTRUMENTS,
        ...    query="santander bonds",
        ...    filter="IsPerpetualSecurity ne true and IsActive eq true and not(AssetStatus in ('MAT' 'DC'))",
        ... )
        >>> active = santander_bonds.get_by_name("active")
        """
        return Properties(df=self.get_properties_df(name), properties=self.get_properties_object(name))

    def get_properties_object(self, name: Union[str, bool, int]) -> Dict[str, "Property"]:
        """
        Browse the properties names that have relative match with specified query. Results are represented
        as dict of objects.

        Parameters
        ----------
        name: Union[str, bool, int]
            Argument to specify expected properties data.

        Returns
        -------
        dict of Property objects

        Examples
        --------
        >>> from refinitiv.data.discovery._search_explorer import SearchPropertyExplorer
        >>> from refinitiv.data.content import search
        >>> explorer = SearchPropertyExplorer()
        >>> santander_bonds = explorer.get_properties_for(
        ...    view=search.Views.GOV_CORP_INSTRUMENTS,
        ...    query="santander bonds",
        ...    filter="IsPerpetualSecurity ne true and IsActive eq true and not(AssetStatus in ('MAT' 'DC'))",
        ... )
        >>> active = santander_bonds.get_properties_object("active")
        """
        # Case-insensitive substring match against the property names.
        name = str(name).lower()
        return {
            prop_name: prop_value for prop_name, prop_value in self.properties.items() if name in str(prop_name).lower()
        }

    def get_properties_df(self, name: str) -> "DataFrame":
        """
        Browse the properties names that have relative match with specified query.

        Parameters
        ----------
        name: str
            String to specify expected properties data.

        Returns
        -------
        pd.DataFrame

        Examples
        --------
        >>> from refinitiv.data.discovery._search_explorer import SearchPropertyExplorer
        >>> from refinitiv.data.content import search
        >>> explorer = SearchPropertyExplorer()
        >>> santander_bonds = explorer.get_properties_for(
        ...    view=search.Views.GOV_CORP_INSTRUMENTS,
        ...    query="santander bonds",
        ...    filter="IsPerpetualSecurity ne true and IsActive eq true and not(AssetStatus in ('MAT' 'DC'))",
        ... )
        >>> active = santander_bonds.get_properties_df("active")
        """
        # Spaces are stripped from the query because property names never contain them.
        return self.df.loc[self.df.Property.str.contains(name.replace(" ", ""), na=False, case=False)]

    def get_by_type(self, property_type: Union[str, PropertyType]) -> Properties:
        """
        Browse the types that match the specified query. Results are represented as the dataframe and dict of objects.

        Parameters
        ----------
        property_type: str, PropertyType
            Argument to specify expected properties data.

        Returns
        -------
        Properties

        Examples
        --------
        >>> from refinitiv.data.discovery._search_explorer import SearchPropertyExplorer
        >>> from refinitiv.data.content import search
        >>> explorer = SearchPropertyExplorer()
        >>> santander_bonds = explorer.get_properties_for(
        ...    view=search.Views.GOV_CORP_INSTRUMENTS,
        ...    query="santander bonds",
        ...    filter="IsPerpetualSecurity ne true and IsActive eq true and not(AssetStatus in ('MAT' 'DC'))",
        ... )
        >>> str_properties = santander_bonds.get_by_type(PropertyType.String)
        """
        # NOTE(review): the DataFrame filter uses a case-insensitive substring
        # match while the dict filter uses exact equality -- the two results can
        # diverge for overlapping type names; confirm this is intended.
        return Properties(
            df=self.df.loc[self.df.Type.str.contains(property_type, na=False, case=False)],
            properties={
                prop_name: prop_value
                for prop_name, prop_value in self.properties.items()
                if str(prop_value.type) == property_type
            },
        )

    def get_by_value(self, value: Union[str, bool, int]) -> Properties:
        """
        Browse the properties example values that match the specified query. Results are represented
        as the dataframe and dict of objects.

        Parameters
        ----------
        value: str, bool, int
            Argument to specify expected properties data.

        Returns
        -------
        Properties

        Examples
        --------
        >>> from refinitiv.data.discovery._search_explorer import SearchPropertyExplorer
        >>> from refinitiv.data.content import search
        >>> explorer = SearchPropertyExplorer()
        >>> santander_bonds = explorer.get_properties_for(
        ...    view=search.Views.GOV_CORP_INSTRUMENTS,
        ...    query="santander bonds",
        ...    filter="IsPerpetualSecurity ne true and IsActive eq true and not(AssetStatus in ('MAT' 'DC'))",
        ... )
        >>> active = santander_bonds.get_by_value("active")
        """
        # isinstance(value, bool) must be checked before int: bool is an int subclass.
        if isinstance(value, bool):
            # Example values are stored as strings, so booleans compare as "True"/"False".
            value = str(value)
            result = self.get_by_type(PropertyType.Boolean)
            df = result.df
            properties = Properties(
                df=df[df["Example Value"] == value],
                properties={
                    prop_name: prop_value
                    for prop_name, prop_value in result.properties.items()
                    if prop_value.value == value
                },
            )
        elif isinstance(value, str) or isinstance(value, int):
            value = str(value)
            lower = str.lower
            value_lower = value.lower()
            # Case-insensitive substring match on the example values.
            properties = Properties(
                df=self.df.loc[self.df["Example Value"].str.contains(value, na=False, case=False)],
                properties={
                    prop_name: prop_value
                    for prop_name, prop_value in self.properties.items()
                    if value_lower in lower(prop_value.value)
                },
            )
        else:
            raise ValueError("Invalid data type. Please provide number, boolean or string.")
        return properties

    def get_navigable(self, prop: str = None) -> "Properties":
        """
        Browse all navigable properties, narrow down results by specifying name of navigable property. Results are
        represented as the dataframe and dict of objects.

        Parameters
        ----------
        prop: str
            String to specify expected properties data.

        Returns
        -------
        Properties

        Examples
        --------
        >>> from refinitiv.data.discovery._search_explorer import SearchPropertyExplorer
        >>> from refinitiv.data.content import search
        >>> explorer = SearchPropertyExplorer()
        >>> santander_bonds = explorer.get_properties_for(
        ...    view=search.Views.GOV_CORP_INSTRUMENTS,
        ...    query="santander bonds",
        ...    filter="IsPerpetualSecurity ne true and IsActive eq true and not(AssetStatus in ('MAT' 'DC'))",
        ... )
        >>> navigable = santander_bonds.get_navigable()
        """
        if prop is None:
            properties = self.properties
        else:
            properties = self.get_properties_object(prop)
        # The "Navigable" column holds string flags, hence the comparison to "True".
        df = self.df.loc[self.df["Navigable"] == "True"]
        if prop:
            df = df.loc[df.Property.str.contains(prop, na=False, case=False)]
        return Properties(
            df=df,
            properties={
                prop_name: prop_value for prop_name, prop_value in properties.items() if prop_value.navigable is True
            },
        )
from typing import Union
from ._df_builder import build_navigators_df, build_search_df, build_metadata_df, merge_metadata_df_and_search_df
from ._property import create_properties
from ._search_explorer_response import SearchPropertyExplorerResponse
from ...content import search
from ...content.search import Views
class SearchPropertyExplorer:
    """
    SearchPropertyExplorer object provides ability to get search data and metadata by
    merging responses from two requests.
    """

    @staticmethod
    def get_properties_for(
        query: str = None,
        filter: str = None,
        view: Union[Views, str] = Views.SEARCH_ALL,
        order_by: str = None,
        navigators: str = None,
    ) -> SearchPropertyExplorerResponse:
        """
        Retrieve search data and metadata. Transform results, create
        properties and navigators objects, merge responses into single object.

        Parameters
        ----------
        query: str, optional
            Keyword argument for view.
        view: Views or str, optional
            Picks a subset of the data universe to search against.
            Default: Views.SEARCH_ALL
        filter: str, optional
            Filter values are boolean predicate expressions that can be defined with help
            of metadata for building more precise requests.
        order_by: str, optional
            Defines the order in which matching documents should be returned.
        navigators: str, optional
            This can name one or more properties, separated by commas, each of which must
            be Navigable. It returns supplemental information about the distribution of the whole matched set.

        Returns
        -------
        SearchPropertyExplorerResponse

        Examples
        --------
        >>> from refinitiv.data.discovery._search_explorer import SearchPropertyExplorer
        >>> explorer = SearchPropertyExplorer()
        >>> santander_bonds = explorer.get_properties_for(
        ...    view=search.Views.GOV_CORP_INSTRUMENTS,
        ...    query="santander bonds",
        ...    filter="IsPerpetualSecurity ne true and IsActive eq true and not(AssetStatus in ('MAT' 'DC'))"
        ... )
        """
        # Debug search request: a single hit with the full debug payload, so
        # both the matched document and any navigator buckets come back at once.
        search_response = search.Definition(
            view=view,
            query=query,
            filter=filter,
            top=1,
            select="_debugall",
            order_by=order_by,
            navigators=navigators,
        ).get_data()
        search_raw = search_response.data.raw
        search_total = search_response.total

        # BUGFIX: keep the built DataFrame in its own variable instead of
        # rebinding the `navigators` parameter -- rebinding made
        # `request_arguments` below record the DataFrame instead of the
        # navigators expression the caller actually requested.
        navigators_df = build_navigators_df(search_raw)
        search_df = build_search_df(search_raw, search_total)

        metadata_response = search.metadata.Definition(view=view).get_data()
        metadata_raw = metadata_response.data.raw
        metadata_df = build_metadata_df(metadata_raw["Properties"], search_df)

        # The original request arguments; Property.get_possible_values() replays
        # the request from these, so they must hold the caller's values.
        request_arguments = {
            "query": query,
            "filter": filter,
            "view": view,
            "order_by": order_by,
            "navigators": navigators,
        }
        df = merge_metadata_df_and_search_df(metadata_df, search_df)
        return SearchPropertyExplorerResponse(
            hits_count=search_total,
            properties=create_properties(df, request_arguments, search_response, metadata_response),
            df=df,
            navigators=navigators_df,
        )
from dataclasses import dataclass
from typing import Union, Dict
import pandas as pd
from ._buckets_data import BucketsData, get_counts_labels_filters
from ._navigator import Navigator
from ..._errors import RDError
from ...content import search
from ...delivery._data._response import Response
def to_bool(value: str) -> Union[bool, None]:
    """Map the strings "True"/"False" to booleans; anything else becomes pd.NA.

    NOTE(review): despite the annotation, the fallback value is ``pd.NA``,
    not ``None``.
    """
    return {"True": True, "False": False}.get(value, pd.NA)
def create_properties(
    df: pd.DataFrame,
    request_arguments: dict,
    search_response: Response,
    metadata_response: Response,
) -> Dict[str, "Property"]:
    """Build one Property object per row of the merged explorer DataFrame."""
    properties = {}
    for data in df.values.tolist():
        # Positional row layout: 0=Property name, 1=Example Value, 2=Type,
        # 3=Searchable, 4=Sortable, 5=Navigable, 6=Groupable, 7=Exact, 8=Symbol.
        # Assumes the column order produced by merge_metadata_df_and_search_df --
        # TODO(review): revisit if that builder changes.
        name = data[0]
        properties[name] = Property(
            name=name,
            value=data[1],
            type=data[2],
            searchable=to_bool(data[3]),
            sortable=to_bool(data[4]),
            navigable=to_bool(data[5]),
            groupable=to_bool(data[6]),
            exact=to_bool(data[7]),
            symbol=to_bool(data[8]),
            request_arguments=request_arguments,
            _search_response=search_response,
            _metadata_response=metadata_response,
        )
    return properties
@dataclass
class Property:
    """Property object that has data and metadata for specific property."""

    # Property name as it appears in the search metadata.
    name: str
    # Example value observed for this property in the sampled search hit.
    value: str
    type: str
    # Capability flags (booleans, or pd.NA when unknown -- see to_bool()).
    searchable: Union[bool, str]
    sortable: Union[bool, str]
    navigable: Union[bool, str]
    groupable: Union[bool, str]
    exact: Union[bool, str]
    symbol: Union[bool, str]
    # Arguments of the originating explorer request, used to replay it.
    request_arguments: dict
    _search_response: Response
    _metadata_response: Response

    def get_possible_values(self) -> Navigator:
        """
        Retrieves the navigator data

        Returns
        -------
        Navigator object

        Examples
        --------
        >>> from refinitiv.data.discovery._search_explorer import SearchPropertyExplorer
        >>> explorer = SearchPropertyExplorer()
        >>> santander_bonds = explorer.get_properties_for(
        ...    view=search.Views.GOV_CORP_INSTRUMENTS,
        ...    query="santander bonds",
        ...    filter="IsPerpetualSecurity ne true and IsActive eq true and not(AssetStatus in ('MAT' 'DC'))",
        ... )
        >>> rcs_issuer_country = santander_bonds.properties["RCSIssuerCountry"].get_possible_values()
        """
        raw_navigators = self._search_response.data.raw.get("Navigators", {})
        if self.name not in raw_navigators:
            # The original response has no navigator for this property: replay
            # the original request, this time navigating on this property.
            definition = search.Definition(
                view=self.request_arguments["view"],
                query=self.request_arguments["query"],
                filter=self.request_arguments["filter"],
                top=1,
                select="_debugall",
                order_by=self.request_arguments["order_by"],
                navigators=self.name,
            )
            response = definition.get_data()
            raw_navigators = response.data.raw.get("Navigators")
            if not raw_navigators:
                # Still no navigator data -> the property is not navigable.
                error = RDError(-1, f"Possible values could not be reached, {self.name} property is not navigable.")
                error.response = response
                raise error
        # Use the first navigator entry of the response.
        navigator_name, navigator_data = next(iter(raw_navigators.items()))
        counts, labels, filters = get_counts_labels_filters(navigator_data["Buckets"])
        data = {navigator_name: labels, "Count": counts}
        if filters:
            data["Filter"] = filters
        return Navigator(df=pd.DataFrame(data), navigator=BucketsData(name=navigator_name, value=labels, count=counts))
import copy
import re
from typing import Tuple, List, Union
from pandas import DataFrame, NA
from ._relationship_type import RelationshipType
from ._stakeholder_data import Customer, Supplier, StakeholderData
from ..._tools import convert_df_columns_to_datetime_re
from ...content import fundamental_and_reference, symbol_conversion
from ...delivery._data._response import Response
# Maps the relationship direction to the stakeholder data class used to
# represent one related organisation.
data_class_by_relationship_type = {
    RelationshipType.CUSTOMER: Customer,
    RelationshipType.SUPPLIER: Supplier,
}
# ADC fields describing the supply-chain relationship itself (first request).
FIELDS = (
    "TR.SCRelationship",
    "TR.SCRelationship.ScorgIDOut",
    "TR.SCRelationshipConfidenceScore",
    "TR.SCRelationshipFreshnessScore",
    "TR.SCRelationshipUpdateDate",
)
# ADC fields fetched in a second pass to enrich each related organisation.
FIELD_TO_UPDATE = (
    "TR.IsPublic",
    "TR.CommonName",
    "TR.HeadquartersCountry",
    "TR.TRBCIndustry",
    "TR.CreditRatioImpliedRating",
    "TR.PCSmartRatiosImpliedRating",
)
# Symbol types the organisation PermIDs are converted to.
TO_SYMBOL_TYPES = [
    symbol_conversion.SymbolTypes.RIC,
    symbol_conversion.SymbolTypes.ISIN,
    symbol_conversion.SymbolTypes.CUSIP,
    symbol_conversion.SymbolTypes.SEDOL,
]
# Matches DataFrame column names holding dates (e.g. "...UpdateDate").
STAKEHOLDERS_DATE_PATTERN = re.compile(r".*Date")
def get_fundamental_data(instrument: Union[list, tuple], fields=None) -> Response:
    """Request ADC data for the given instruments (relationship FIELDS by default)."""
    effective_fields = FIELDS if fields is None else fields
    return fundamental_and_reference.Definition(universe=instrument, fields=effective_fields).get_data()
def get_symbol_conversion_data(ric_list: Union[list, tuple]) -> Response:
    """Convert organisation PermIDs to RIC/ISIN/CUSIP/SEDOL symbols."""
    definition = symbol_conversion.Definition(
        symbols=ric_list,
        from_symbol_type=symbol_conversion.SymbolTypes.OA_PERM_ID,
        to_symbol_types=TO_SYMBOL_TYPES,
    )
    return definition.get_data()
def get_df_column(fund_data_column: list, symbol_data: dict):
    """Return a new row with symbology columns (RIC/ISIN/CUSIP/SEDOL) spliced
    in after index 4.

    The related-organisation id at index 3 selects the symbol record; a
    missing second element (None or "") is normalized to pandas.NA.
    """
    symbols = symbol_data.get(fund_data_column[3], {})
    row = (
        fund_data_column[:5]
        + [
            symbols.get("RIC"),
            symbols.get("IssueISIN"),
            symbols.get("CUSIP"),
            symbols.get("SEDOL"),
        ]
        + fund_data_column[5:]
    )
    if row[1] in (None, ""):
        row[1] = NA
    return row
def get_columns(first_fund_response: "Response", second_fund_response: "Response"):
    """Merge the header rows of both ADC responses into one column-name list,
    inserting the symbology column names (RIC/ISIN/CUSIP/SEDOL) after index 4."""
    second_headers = second_fund_response.data.raw["headers"][0]
    merged = list(first_fund_response.data.raw["headers"][0])
    merged.insert(1, second_headers[1])
    merged.insert(4, second_headers[2])
    merged.extend(second_headers[3:])
    names = [header["displayName"] for header in merged]
    return names[:5] + ["RIC", "IssueISIN", "CUSIP", "SEDOL"] + names[5:]
def update_fund_data(fund_data: Union[list, tuple], fund_response: "Response"):
    """Enrich each relationship row in place with per-organisation data.

    The response rows are keyed by organisation id; rows without a match are
    padded with None so every row keeps the same shape.
    """
    org_data = {row[0]: row[1:] for row in fund_response.data.raw["data"]}
    for row in fund_data:
        extra = org_data.get(row[2], [None] * 6)
        row.insert(1, extra[0])
        row.insert(4, extra[1])
        row.extend(extra[2:])
def fetch_data(
    instrument: Union[str, list], relationship_type: RelationshipType
) -> Tuple[List[StakeholderData], DataFrame]:
    """Fetch customer/supplier stakeholders for the given instrument(s).

    Performs three requests: relationship rows, per-organisation enrichment
    fields, and PermID-to-symbol conversion; then builds both the stakeholder
    objects and a typed DataFrame.
    """
    first_fund_response = get_fundamental_data(instrument)
    # Keep only rows of the requested relationship direction.
    fund_data = tuple(
        filter(lambda elem: elem[1] == relationship_type.value, first_fund_response.data.raw.get("data", []))
    )
    # fund_data -> (['VOD.L', 'Supplier', '4295858439', 0.2648416, 1, '2013-06-04'], ...)
    ric_list = tuple(map(lambda elem: elem[2], fund_data))
    # ric_list -> ["5000051106", ...]
    second_fund_response = get_fundamental_data(ric_list, fields=FIELD_TO_UPDATE)
    # Mutates the row lists inside fund_data in place.
    update_fund_data(fund_data, second_fund_response)
    # fund_data -> [['VOD.L', True, 'Supplier', '4295858439', 0.2648416, 1, '2013-06-04', 'Australia', 'Real Estate Rental, Development & Operations', 'BBB+', ''], ...]
    symbol_response = get_symbol_conversion_data(ric_list)
    symbol_data = symbol_response.data.raw.get("Matches")
    # symbol_data -> {"5000051106": {"DocumentTitle": "Indian", "RIC": "IOTL.NS"}, ...}
    data_for_df = []
    stakeholders = []
    for stakeholder_data in fund_data:
        stakeholder = data_class_by_relationship_type[relationship_type].from_list(stakeholder_data)
        stakeholder_symbol_data = symbol_data.get(stakeholder.related_organization_id)
        if stakeholder_symbol_data:
            stakeholder.update(stakeholder_symbol_data)
        stakeholders.append(stakeholder)
        data_for_df.append(get_df_column(stakeholder_data, symbol_data))
    columns = get_columns(first_fund_response, second_fund_response)
    df = DataFrame(data_for_df, columns=columns).convert_dtypes()
    # Parse all "...Date" columns to datetimes.
    df = convert_df_columns_to_datetime_re(df, STAKEHOLDERS_DATE_PATTERN)
    return stakeholders, df
from typing import TYPE_CHECKING
from ._chain import default_error_message
from ._universe_expander import UniverseExpander
from ..._core.session import get_default
from ..._errors import RDError
from ..._tools import DEBUG
from ..._tools import cached_property
from ...content import fundamental_and_reference
from ...content._content_data import Data
if TYPE_CHECKING:
from logging import Logger
def update_universe(raw, _universe):
    """Extract the instrument names from an ADC payload.

    Returns the first column of every data row when all of those values are
    strings; otherwise falls back to *_universe* unchanged.
    """
    rows = raw.get("data")
    names_are_valid = bool(rows) and all(isinstance(row[0], str) for row in rows)
    return [row[0] for row in rows] if names_are_valid else _universe
def get_universe(expression):
    """Resolve a discovery *expression* to a list of RICs via the ADC TR.RIC field."""
    session = get_default()
    adc_data = get_adc_data(
        params={"universe": expression, "fields": "TR.RIC"},
        logger=session.logger(),
    )
    return update_universe(adc_data.raw, None)
def get_adc_data(params: dict, logger: "Logger") -> Data:
    """
    Gets data from ADC endpoint.

    Parameters
    ----------
    params : dict
        API request parameters; must contain "universe", may contain "fields".
    logger : Logger
        Session logger.

    Returns
    -------
    response : Data
        API response data.
    """
    fields = params.get("fields", "")
    universe = params["universe"]
    logger.info(f"Requesting {fields} for {universe}")
    response = fundamental_and_reference.Definition(**params).get_data()
    # Only format the (potentially large) dataframe when debug logging is on.
    DEBUG and logger.debug(f"ADC --->\n{response.data.df.to_string()}\n")
    # A batched request yields lists of messages/statuses; normalize single
    # responses to one-element lists so the logging loop below is uniform.
    request_messages = response.request_message
    statuses = response.http_status
    if not isinstance(response.request_message, list):
        request_messages = [response.request_message]
        statuses = [response.http_status]
    for request, status in zip(request_messages, statuses):
        path = request.url.path
        # The last path segment usually names the requested universe; fall back
        # to the full universe parameter when it does not match.
        current_universe = path.rsplit("/", 1)[-1]
        if current_universe not in universe:
            current_universe = universe
        logger.info(f"Request to {path} with {fields} for {current_universe}\nstatus: {status}\n")
    return response.data
class DiscoveryUniverse(UniverseExpander):
    """Universe expander backed by an ADC discovery expression.

    The expression is resolved lazily (and once) on first access to the
    underlying universe; an empty resolution raises RDError.
    """

    def __init__(self, expression):
        self._expression = expression

    @property
    def expression(self):
        """The raw discovery expression this universe was built from."""
        return self._expression

    @cached_property
    def _universe(self):
        resolved = get_universe(self._expression)
        if resolved:
            return resolved
        raise RDError(-1, default_error_message)
from dataclasses import dataclass, field
from threading import Event
from typing import Tuple, List, TYPE_CHECKING
from ._universe_expander import UniverseExpander
from ..._core.session import get_default
from ..._errors import RDError, ScopeError
from ..._tools import cached_property
from ...content import fundamental_and_reference
from ...content.pricing import chain
from ...content.pricing.chain._stream_facade import Stream
from ...delivery import endpoint_request
from ...delivery._data._data_provider_layer import _check_response
if TYPE_CHECKING:
from ...delivery._data._response import Response
# Message used whenever the backend gives no usable error text or no data.
default_error_message = "No values to unpack"


def on_error_callback(data: Tuple[dict], universe: str, stream: Stream):
    """Close *stream* and raise an RDError built from the first message's State."""
    state = data[0].get("State", {})
    code = state.get("Code", -1)
    text = state.get("Text", default_error_message)
    stream.close()
    raise RDError(code, f"{text}\nuniverse: {universe}")
def get_chain_data_from_stream(name):
    """Resolve chain *name* via a streaming chain subscription.

    Blocks the calling thread until the stream reports completion (or an error
    callback raises). Raises RDError when the chain resolves to no constituents.
    """
    constituents, summary_links = [], []
    is_stream_closed = Event()

    def on_complete(_, stream):
        # Runs on the stream's callback thread: capture the results, close the
        # stream, then release the waiting caller.
        nonlocal constituents, summary_links, is_stream_closed
        stream.close()
        constituents, summary_links = stream.constituents, stream.summary_links
        is_stream_closed.set()

    chain_stream = chain.Definition(name).get_stream()
    chain_stream.on_complete(on_complete)
    chain_stream.on_error(on_error_callback)
    chain_stream.open()
    is_stream_closed.wait()
    if not constituents:
        raise RDError(-1, default_error_message)
    return ChainData(constituents, summary_links)
def get_chain_data_from_adc(name):
    """Resolve chain *name* to its constituents via the ADC TR.RIC field.

    Returns a ChainData with constituents only (ADC provides no summary links).
    """
    adc_response = fundamental_and_reference.Definition(universe=name, fields=["TR.RIC"]).get_data()
    # Fix: default for the missing "data" key is now a list, matching the
    # value type actually iterated (it was a dict, which iterated to nothing
    # by accident rather than by design).
    constituents = [item[0] for item in adc_response.data.raw.get("data", [])]
    return ChainData(constituents)
def _get_constituents(response: "Response") -> list:
return response.data.raw.get("data", {}).get("constituents", [])
def get_chain_data_from_chain_endpoint(name):
    """Resolve chain *name* via the pricing chains REST endpoint, following pagination.

    Fix: summary links were only separated from constituents on the first page;
    paginated pages dumped everything into constituents. The same classification
    is now applied to every page.
    """
    url = "/data/pricing/chains/v1/"
    session = get_default()
    session.verify_scope(url, "get")
    chain_response = endpoint_request.Definition(url=url, query_parameters={"universe": name}).get_data()
    _check_response(chain_response, session.config)
    summary_links = []
    constituents = []

    def _classify(items):
        # Chain/summary RICs start with '.' or '/' or end with '='; everything
        # else is a regular constituent.
        for item in items:
            if item.startswith((".", "/")) or item.endswith("="):
                summary_links.append(item)
            else:
                constituents.append(item)

    _classify(_get_constituents(chain_response))
    next_link = chain_response.data.raw.get("meta", {}).get("nextLink")
    while next_link:
        chain_response = endpoint_request.Definition(
            url=url, query_parameters={"universe": name, "target": next_link}
        ).get_data()
        next_link = chain_response.data.raw.get("meta", {}).get("nextLink")
        _classify(_get_constituents(chain_response))
    return ChainData(constituents, summary_links)
def get_chain_data(name):
    """Resolve chain *name*, preferring stream, then ADC, then the REST endpoint.

    Each source is skipped when the session lacks the required scope.
    """
    try:
        return get_chain_data_from_stream(name)
    except ScopeError:
        pass
    try:
        return get_chain_data_from_adc(name)
    except ScopeError:
        pass
    return get_chain_data_from_chain_endpoint(name)
@dataclass
class ChainData:
    """Resolved chain content: regular constituent RICs plus chain/summary RICs."""

    # Idiom fix: `default_factory=list` replaces `lambda: []` — identical
    # behavior (each instance gets its own fresh list), standard form.
    constituents: List = field(default_factory=list)
    summary_links: List = field(default_factory=list)
class Chain(UniverseExpander):
    """
    Expands a chain RIC into its constituents.

    Parameters
    ----------
    name : str
        chain name

    Examples
    --------
    >>> chain = Chain("0#.DJI")
    >>> print(list(chain))
    >>> print(chain.constituents)
    >>> print(chain.summary_links)
    """

    def __init__(self, name):
        self._name = name

    @property
    def name(self):
        """Chain RIC this object expands."""
        return self._name

    @cached_property
    def _chains(self):
        # Resolved lazily on first access and memoized for the object's lifetime.
        return get_chain_data(self._name)

    @property
    def constituents(self):
        """Regular instrument RICs of the chain."""
        return self._chains.constituents

    @property
    def summary_links(self):
        """Chain/summary RICs of the chain."""
        return self._chains.summary_links

    @property
    def _universe(self):
        return self.constituents
from itertools import product
from typing import List, Dict
import pandas as pd
from ._historical_raw_transf import transform_for_df_by_fields, transform_for_df_by_headers_names
from .._tools._dataframe import convert_dtypes
from .._types import Strings
def process_bad_raws(bad_raws, listofcolumns, last_raw_columns, fields, num_fields):
    """Insert placeholder column headers for instruments whose response failed.

    Each entry of *bad_raws* is ``(position, raw)`` where ``raw`` carries at
    least ``{"universe": {"ric": ...}}``; a header group for that instrument is
    inserted into *listofcolumns* at ``position``.
    """
    # Fallback column names: requested fields -> columns of the last good raw
    # -> "Field". NOTE(review): the final fallback is a plain string; when
    # num_fields != 1 the pairing below would iterate its characters — confirm
    # ["Field"] was not intended.
    fallback_columns = fields or last_raw_columns or "Field"
    for position, bad_raw in bad_raws:
        ric = bad_raw["universe"]["ric"]
        if num_fields == 1:
            header_group = [ric]
        else:
            header_group = [(ric, column) for column in fallback_columns]
        listofcolumns.insert(position, header_group)
def process_data(data_append, index_append, date, items, num_allcolumns, num_raws, left_num_columns):
    """Emit dataframe rows for all *items* that share one *date*.

    Values from different instruments are merged into a single row template of
    width *num_allcolumns*; a new row is flushed whenever an instrument repeats
    (same instidx twice for one date) or every *num_raws* items.

    data_append / index_append are bound ``list.append`` methods of the caller's
    data and index accumulators.
    """
    prev_idx = None
    counter = 0
    # Row template pre-filled with NA; per-instrument slices are written into it.
    template = [pd.NA] * num_allcolumns
    for instidx, raw_data, raw_columns in items:
        # Flush the current row and start a fresh template when the row is
        # "full" (num_raws items consumed) or the same instrument reappears.
        if (counter != 0 and counter % num_raws == 0) or prev_idx == instidx:
            index_append(date)
            data_append(template)
            template = [pd.NA] * num_allcolumns
            prev_idx = instidx
        if prev_idx is None:
            prev_idx = instidx
        counter += 1
        # Write this instrument's values into its reserved column span.
        left_idx = left_num_columns[instidx]
        right_idx = left_idx + len(raw_columns)
        for item, i in zip(raw_data, range(left_idx, right_idx)):
            template[i] = item
    # Flush the last (possibly partially filled) row.
    index_append(date)
    data_append(template)
class HistoricalBuilder:
    """Builds pandas DataFrames from raw historical-pricing payloads.

    ``build_one`` handles a single instrument; ``build`` merges several raw
    payloads into one frame with per-instrument column groups.
    """

    def _prepare_columns(self, raws, listofcolumns, bad_raws, fields, universe, items_by_date, num_fields):
        """Classify raws into good/bad, fill items_by_date and listofcolumns.

        Returns the column names of the last successfully transformed raw (or
        None when every raw failed) — used later as a header fallback.
        """
        columns = None
        listofcolumns_append = listofcolumns.append
        bad_raws_append = bad_raws.append
        for instidx, raw in enumerate(raws):
            # it means error in response for custom instruments
            if not raw:
                raw = {"universe": {"ric": universe[instidx]}}
                bad_raws_append((instidx, raw))
                continue
            # it means error in response for historical pricing
            if isinstance(raw, list):
                raw = raw[0]
                bad_raws_append((instidx, raw))
                continue
            # it means in response for historical pricing events
            if isinstance(raw, dict) and not raw.get("headers"):
                raw = {"universe": {"ric": universe[instidx]}}
                bad_raws_append((instidx, raw))
                continue
            else:
                if fields:
                    transformed = transform_for_df_by_fields(raw, fields)
                else:
                    transformed = transform_for_df_by_headers_names(raw)
                columns = transformed.fields
                # Group rows by date so build() can merge instruments per date.
                for date, raw_data in zip(transformed.dates, transformed.data):
                    items = items_by_date.setdefault(date, [])
                    items.append((instidx, raw_data, columns))
                inst_name = raw["universe"]["ric"]
                # Single-field frames use plain instrument names as headers;
                # otherwise (instrument, field) pairs feed a MultiIndex.
                if num_fields == 1:
                    processed_columns = [inst_name]
                else:
                    processed_columns = list(product([inst_name], columns))
                listofcolumns_append(processed_columns)
        return columns

    def build_one(self, raw: dict, fields: Strings, axis_name: str, **__) -> pd.DataFrame:
        """Build a sorted, dtype-converted frame for one instrument's raw payload."""
        if not raw["data"]:
            return pd.DataFrame()
        if fields:
            transformed = transform_for_df_by_fields(raw, fields)
        else:
            transformed = transform_for_df_by_headers_names(raw)
        data = transformed.data
        columns = transformed.fields
        index = transformed.dates
        inst_name = raw["universe"]["ric"]
        columns = pd.Index(data=columns, name=inst_name)
        index = pd.Index(data=index, name=axis_name)
        df = pd.DataFrame(data=data, columns=columns, index=index)
        df = convert_dtypes(df)
        df.sort_index(inplace=True)
        return df

    def build(self, raws: List[dict], universe: Strings, fields: Strings, axis_name: str, **__) -> pd.DataFrame:
        """Merge several instruments' raw payloads into one date-indexed frame."""
        items_by_date: Dict[str, list] = {}
        listofcolumns = []
        num_raws = len(raws)
        bad_raws = []
        num_fields = len(fields)
        last_raw_columns = self._prepare_columns(
            raws, listofcolumns, bad_raws, fields, universe, items_by_date, num_fields
        )
        if not items_by_date:
            return pd.DataFrame()
        if bad_raws:
            # Failed instruments still get (placeholder) header groups so the
            # column layout stays aligned with the universe order.
            process_bad_raws(bad_raws, listofcolumns, last_raw_columns, fields, num_fields)
        # Offset of each instrument's first column within the flattened header row.
        left_num_columns = {
            split_idx: sum([len(subcols) for subcols in listofcolumns[:split_idx]]) for split_idx in range(num_raws)
        }
        allcolumns = [col for subcolumns in listofcolumns for col in subcolumns]
        num_allcolumns = len(allcolumns)
        data = []
        index = []
        data_append = data.append
        index_append = index.append
        for date, items in items_by_date.items():
            num_items = len(items)
            if num_items > 1:
                process_data(
                    data_append,
                    index_append,
                    date,
                    items,
                    num_allcolumns,
                    num_raws,
                    left_num_columns,
                )
            else:
                # Single instrument for this date: pad with NA on both sides.
                index_append(date)
                instidx, raw_data, raw_columns = items[0]
                left = [pd.NA] * left_num_columns[instidx]
                right = [pd.NA] * (num_allcolumns - len(raw_columns) - len(left))
                data_append(left + raw_data + right)
        if num_fields == 1:
            columns = pd.Index(data=allcolumns, name=fields[0])
        else:
            columns = pd.MultiIndex.from_tuples(allcolumns)
        index = pd.Index(data=index, name=axis_name)
        df = pd.DataFrame(data=data, columns=columns, index=index)
        df = convert_dtypes(df)
        df.sort_index(inplace=True)
        return df
class CustomInstsBuilder(HistoricalBuilder):
    """HistoricalBuilder variant for custom instruments.

    Differs from the base ``build_one`` in that it does not require a non-empty
    "data" key up front; instead it returns an empty frame when every
    transformed value is NA.
    """

    def build_one(self, raw: dict, fields: Strings, axis_name: str, **__) -> pd.DataFrame:
        if fields:
            transformed = transform_for_df_by_fields(raw, fields)
        else:
            transformed = transform_for_df_by_headers_names(raw)
        data = transformed.data
        columns = transformed.fields
        index = transformed.dates
        # All-NA payloads are treated as "no data".
        if all(i is pd.NA for j in data for i in j):
            return pd.DataFrame()
        inst_name = raw["universe"]["ric"]
        columns = pd.Index(data=columns, name=inst_name)
        index = pd.Index(data=index, name=axis_name)
        df = pd.DataFrame(data=data, columns=columns, index=index)
        df = convert_dtypes(df)
        df.sort_index(inplace=True)
        return df
# Shared, stateless builder singletons used by the historical-pricing and
# custom-instruments dataframe assembly code.
historical_builder = HistoricalBuilder()
custom_insts_builder = CustomInstsBuilder()
from itertools import zip_longest
from typing import TYPE_CHECKING, Union
from ._content_response_factory import ContentResponseFactory
from ..delivery._data._endpoint_data import Error
from ..delivery._data._response_factory import get_closure
if TYPE_CHECKING:
from ..delivery._data._data_provider import ParsedData
# Error-message templates keyed by backend error code; "default" is the
# fallback template. Format placeholders: {error_message}, {rics}, {fields}.
error_message_by_code = {
    "default": "{error_message}. Requested ric: {rics}. Requested fields: {fields}",
    "TS.Intraday.UserRequestError.90001": "{rics} - The universe is not found",
    "TS.Intraday.Warning.95004": "{rics} - Trades interleaving with corrections is currently not supported. Corrections will not be returned.",
    "TS.Intraday.UserRequestError.90006": "{error_message} Requested ric: {rics}",
}
class HistoricalResponseFactory(ContentResponseFactory):
    """Response factory for historical-pricing content.

    Translates backend error codes into human-readable messages (see
    ``error_message_by_code``) and assembles the single-request Response object.
    """

    @staticmethod
    def _write_error(error_code, error_message, rics, parsed_data: "ParsedData", **kwargs):
        """Format *error_code*/*error_message* and store them on *parsed_data*.

        Bug fix: the fallback template was the literal string "default"
        (``.get(error_code, "default")``), so unknown codes produced the bare
        message "default"; now the registered default template is used, matching
        ``_try_write_error`` below.
        """
        error_messages = error_message_by_code.get(error_code, error_message_by_code["default"]).format(
            error_message=error_message,
            rics=rics,
            fields=kwargs.get("fields"),
        )
        parsed_data.error_codes = error_code
        parsed_data.error_messages = error_messages

    @staticmethod
    def _try_write_error(parsed_data: "ParsedData", **kwargs):
        """Populate parsed_data's error fields from either the parsed error or
        the per-instrument "status" section of a one-element payload."""
        if isinstance(parsed_data.content_data, list) and len(parsed_data.content_data) == 1:
            raw = parsed_data.content_data[0]
        else:
            raw = {}
        error_code = parsed_data.first_error_code or raw.get("status", {}).get("code")
        if error_code:
            parsed_data.error_codes = error_code
            parsed_data.error_messages = error_message_by_code.get(error_code, error_message_by_code["default"]).format(
                error_message=parsed_data.first_error_message or raw.get("status", {}).get("message"),
                rics=raw.get("universe", {}).get("ric", kwargs.get("universe")),
                fields=kwargs.get("fields"),
            )

    def get_raw(self, parsed_data: "ParsedData") -> dict:
        # Historical payloads arrive as a one-element list.
        return parsed_data.content_data[0]

    def create_success(self, parsed_data: "ParsedData", **kwargs):
        # Even successful responses may carry a per-instrument error status.
        self._try_write_error(parsed_data, **kwargs)
        return super().create_success(parsed_data, **kwargs)

    def create_fail(self, parsed_data: "ParsedData", **kwargs):
        self._try_write_error(parsed_data, **kwargs)
        return super().create_fail(parsed_data, **kwargs)

    def _do_create_response(
        self,
        is_success: bool,
        raw: Union[dict, list, str],
        parsed_data: "ParsedData",
        **kwargs,
    ):
        """Build the Response object for a single HTTP request/response pair."""
        http_response = parsed_data.raw_response
        return self.response_class(
            is_success,
            request_message=[http_response.request],
            http_response=[http_response],
            http_headers=[http_response.headers],
            http_status=[parsed_data.status],
            errors=[Error(code, msg) for code, msg in zip_longest(parsed_data.error_codes, parsed_data.error_messages)],
            closure=get_closure(http_response),
            requests_count=1,
            _data_factory=self,
            _kwargs=kwargs,
            _raw=raw,
        )
import abc
from datetime import timedelta
from typing import Callable, List, Any, Optional, TYPE_CHECKING
from ._content_data_factory import ContentDataFactory
from ._df_builder import build_empty_df
from .._content_type import ContentType
from .._tools import hp_datetime_adapter
from ..content._intervals import (
interval_arg_parser,
get_day_interval_type,
DayIntervalType,
Intervals,
)
from ..delivery._data._data_provider import Response
if TYPE_CHECKING:
from ..delivery._data._response import BaseResponse
# Upper bound, in seconds, of one bar for each supported intraday interval
# (one second less than the interval length). Used by the download loops to
# decide when the remaining window is smaller than a single bar.
INTERVALS_BY_SECONDS = {
    Intervals.ONE_MINUTE: 59,
    Intervals.FIVE_MINUTES: 299,
    Intervals.TEN_MINUTES: 599,
    Intervals.THIRTY_MINUTES: 1799,
    Intervals.SIXTY_MINUTES: 3599,
    Intervals.HOURLY: 3599,
}
# Maximum number of events the backend returns in a single response page.
EVENTS_MAX_LIMIT = 10000
def remove_last_date_elements(data: List[List[Any]]) -> List[List[Any]]:
    """Drop the trailing run of rows that share the last row's date.

    A full (limit-sized) page may cut a same-timestamp run of events in half;
    removing the whole trailing run lets the next page re-fetch it completely.
    Returns *data* unchanged when every row shares the last date.

    Fix: an empty *data* previously raised IndexError; it is now returned as-is.
    """
    if not data:
        return data
    end_date = data[-1][0]
    for offset, row in enumerate(data[::-1]):
        if row[0] != end_date:
            return data[:-offset]
    return data
class EntireDataProvider(abc.ABC):
    """Base class for providers that stitch paginated responses into one.

    Subclasses supply the page-accumulating request strategies; ``get_data``
    picks one via ``get_request_function`` or falls back to a single request.
    """

    @abc.abstractmethod
    def request_with_dates(self, *args) -> Response:
        pass

    @abc.abstractmethod
    def request_with_count(self, *args) -> Response:
        pass

    @abc.abstractmethod
    def get_request_function(self, **kwargs) -> Optional[Callable]:
        pass

    @abc.abstractmethod
    def get_request_function_async(self, **kwargs) -> Optional[Callable]:
        pass

    def get_data(self, provide_data: Callable, **kwargs) -> Response:
        """Run the multi-request strategy if one applies, else a single request."""
        request_function = self.get_request_function(**kwargs)
        if request_function:
            response = request_function(provide_data, **kwargs)
        else:
            response = provide_data(**kwargs)
        return response

    async def get_data_async(self, provide_data: Callable, **kwargs) -> Response:
        """Async counterpart of ``get_data``."""
        request_function = self.get_request_function_async(**kwargs)
        if request_function:
            response = await request_function(provide_data, **kwargs)
        else:
            response = await provide_data(**kwargs)
        return response
class SummariesEntireDataProvider(EntireDataProvider):
    """Entire-data provider for intraday summaries.

    Repeatedly re-requests with the end date moved back to the last received
    bar until the requested window/count is covered. Sync and async variants
    are kept as separate, structurally identical methods.
    """

    def request_with_dates(
        self,
        provide_data: Callable,
        interval,
        start: str,
        end: str,
        count: Optional[int] = None,
        **kwargs,
    ) -> Response:
        """Accumulate pages walking backwards from *end* until *start* is reached."""
        interval_sec = INTERVALS_BY_SECONDS[interval_arg_parser.get_str(interval)]
        entire_data = []
        responses = []
        # Tracks distinct page sizes; more than one distinct size means the
        # backend returned a short (final) page, so the loop condition stops.
        unique_data_count = set()
        last_raw = {}
        finished_date = hp_datetime_adapter.get_localize(start)
        # need do ... while
        end_date = finished_date + timedelta(microseconds=1)
        while end_date > finished_date and len(unique_data_count) <= 1:
            response = provide_data(
                interval=interval,
                count=count,
                start=start,
                end=end,
                **kwargs,
            )
            responses.append(response)
            if not response.is_success:
                break
            raw = response.data.raw
            last_raw = raw
            if len(raw) == 0 or not raw.get("data"):
                break
            data = list(raw["data"])
            entire_data.extend(data)
            if count is not None and len(entire_data) >= count:
                entire_data = entire_data[:count]
                break
            unique_data_count.add(len(data))
            # Move the window's end back to the oldest bar received.
            end_date = data[-1][0]
            end = end_date
            end_date = hp_datetime_adapter.get_localize(end_date)
            # NOTE(review): timedelta.seconds ignores the days component; for
            # windows spanning more than a day this break can trigger early —
            # .total_seconds() may have been intended; confirm.
            if (end_date - finished_date).seconds < interval_sec:
                break
        return entire_create_response(
            responses,
            last_raw,
            entire_data,
            kwargs,
        )

    async def request_with_dates_async(
        self,
        provide_data: Callable,
        interval,
        start: str,
        end: str,
        count: Optional[int] = None,
        **kwargs,
    ) -> Response:
        """Async counterpart of ``request_with_dates`` (same loop, awaited calls)."""
        interval_sec = INTERVALS_BY_SECONDS[interval_arg_parser.get_str(interval)]
        entire_data = []
        responses = []
        unique_data_count = set()
        last_raw = {}
        finished_date = hp_datetime_adapter.get_localize(start)
        # need do ... while
        end_date = finished_date + timedelta(microseconds=1)
        while end_date > finished_date and len(unique_data_count) <= 1:
            response = await provide_data(
                interval=interval,
                count=count,
                start=start,
                end=end,
                **kwargs,
            )
            responses.append(response)
            if not response.is_success:
                break
            raw = response.data.raw
            last_raw = raw
            if len(raw) == 0 or not raw.get("data"):
                break
            data = list(raw["data"])
            entire_data.extend(data)
            if count is not None and len(entire_data) >= count:
                entire_data = entire_data[:count]
                break
            unique_data_count.add(len(data))
            end_date = data[-1][0]
            end = end_date
            end_date = hp_datetime_adapter.get_localize(end_date)
            # NOTE(review): see request_with_dates — .seconds vs .total_seconds().
            if (end_date - finished_date).seconds < interval_sec:
                break
        return entire_create_response(
            responses,
            last_raw,
            entire_data,
            kwargs,
        )

    def request_with_count(
        self,
        provide_data: Callable,
        interval,
        count: int,
        end: str,
        start: Optional[str] = None,
        **kwargs,
    ) -> Response:
        """Accumulate pages until *count* bars are gathered (or data runs out)."""
        interval_sec = INTERVALS_BY_SECONDS[interval_arg_parser.get_str(interval)]
        c = count  # remaining bars still to fetch
        entire_data = []
        responses = []
        unique_data_count = set()
        last_raw = {}
        finished_date = None
        if start:
            finished_date = hp_datetime_adapter.get_localize(start)
        while c > 0 and len(unique_data_count) <= 1:
            response = provide_data(
                interval=interval,
                count=count,
                start=start,
                end=end,
                **kwargs,
            )
            responses.append(response)
            if not response.is_success:
                break
            raw = response.data.raw
            last_raw = raw
            if len(raw) == 0 or not raw.get("data"):
                break
            data = list(raw["data"])
            entire_data.extend(data)
            unique_data_count.add(len(data))
            c -= len(data)
            count = c
            end_date = data[-1][0]
            end = end_date
            if finished_date:
                end_date = hp_datetime_adapter.get_localize(end_date)
                # NOTE(review): .seconds vs .total_seconds() — see above.
                if (end_date - finished_date).seconds < interval_sec:
                    break
        return entire_create_response(
            responses,
            last_raw,
            entire_data,
            kwargs,
        )

    async def request_with_count_async(
        self,
        provide_data: Callable,
        interval,
        count: int,
        end: str,
        start: Optional[str] = None,
        **kwargs,
    ) -> Response:
        """Async counterpart of ``request_with_count``."""
        interval_sec = INTERVALS_BY_SECONDS[interval_arg_parser.get_str(interval)]
        c = count
        entire_data = []
        responses = []
        unique_data_count = set()
        last_raw = {}
        finished_date = None
        if start:
            finished_date = hp_datetime_adapter.get_localize(start)
        while c > 0 and len(unique_data_count) <= 1:
            response = await provide_data(
                interval=interval,
                count=count,
                start=start,
                end=end,
                **kwargs,
            )
            responses.append(response)
            if not response.is_success:
                break
            raw = response.data.raw
            last_raw = raw
            if len(raw) == 0 or not raw.get("data"):
                break
            data = list(raw["data"])
            entire_data.extend(data)
            unique_data_count.add(len(data))
            c -= len(data)
            count = c
            end_date = data[-1][0]
            end = end_date
            if finished_date:
                end_date = hp_datetime_adapter.get_localize(end_date)
                # NOTE(review): .seconds vs .total_seconds() — see above.
                if (end_date - finished_date).seconds < interval_sec:
                    break
        return entire_create_response(
            responses,
            last_raw,
            entire_data,
            kwargs,
        )

    def get_request_function(
        self,
        interval,
        count: Optional[int] = None,
        start: Optional[str] = None,
        end: Optional[str] = None,
        **kwargs,
    ) -> Optional[Callable]:
        """Pick a pagination strategy; only intraday intervals paginate."""
        request_function = None
        if interval is None or get_day_interval_type(interval) is not DayIntervalType.INTRA:
            return request_function
        if start is not None and end is not None:
            request_function = self.request_with_dates
        elif count is not None and count > 0:
            request_function = self.request_with_count
        return request_function

    def get_request_function_async(
        self,
        interval,
        count: Optional[int] = None,
        start: Optional[str] = None,
        end: Optional[str] = None,
        **kwargs,
    ) -> Optional[Callable]:
        """Async counterpart of ``get_request_function``."""
        request_function = None
        if interval is None or get_day_interval_type(interval) is not DayIntervalType.INTRA:
            return request_function
        if start is not None and end is not None:
            request_function = self.request_with_dates_async
        elif count is not None and count > 0:
            request_function = self.request_with_count_async
        return request_function
class EventsEntireDataProvider(EntireDataProvider):
    """Entire-data provider for tick events.

    Pages are full when they hit EVENTS_MAX_LIMIT rows; a full page's trailing
    same-timestamp run is trimmed (remove_last_date_elements) so the next page
    can re-fetch that run completely.
    """

    def request_with_dates(
        self,
        provide_data: Callable,
        start: str,
        end: str,
        count: Optional[int] = None,
        **kwargs,
    ) -> Response:
        """Accumulate event pages walking backwards from *end* until *start*."""
        entire_data = []
        responses = []
        last_raw = {}
        finished_date = hp_datetime_adapter.get_localize(start)
        # need do ... while
        end_date = finished_date + timedelta(microseconds=1)
        response_count = EVENTS_MAX_LIMIT
        while end_date > finished_date and response_count >= EVENTS_MAX_LIMIT:
            response = provide_data(count=count, start=start, end=end, **kwargs)
            responses.append(response)
            if not response.is_success:
                break
            raw = response.data.raw
            last_raw = raw
            if len(raw) == 0 or not raw.get("data"):
                break
            data = list(raw["data"])
            response_count = len(data)
            if response_count >= EVENTS_MAX_LIMIT:
                # Full page: drop the (possibly truncated) trailing-date run.
                data = remove_last_date_elements(data)
            entire_data.extend(data)
            end_date = data[-1][0]
            end = end_date
            end_date = hp_datetime_adapter.get_localize(end_date)
            if count is not None and len(entire_data) >= count:
                entire_data = entire_data[:count]
                break
        return entire_create_response(
            responses,
            last_raw,
            entire_data,
            kwargs,
        )

    async def request_with_dates_async(
        self,
        provide_data: Callable,
        start: str,
        end: str,
        count: Optional[int] = None,
        **kwargs,
    ) -> Response:
        """Async counterpart of ``request_with_dates``."""
        entire_data = []
        responses = []
        last_raw = {}
        finished_date = hp_datetime_adapter.get_localize(start)
        # need do ... while
        end_date = finished_date + timedelta(microseconds=1)
        response_count = EVENTS_MAX_LIMIT
        while end_date > finished_date and response_count >= EVENTS_MAX_LIMIT:
            response = await provide_data(count=count, start=start, end=end, **kwargs)
            responses.append(response)
            if not response.is_success:
                break
            raw = response.data.raw
            last_raw = raw
            if len(raw) == 0 or not raw.get("data"):
                break
            data = list(raw["data"])
            response_count = len(data)
            if response_count >= EVENTS_MAX_LIMIT:
                data = remove_last_date_elements(data)
            entire_data.extend(data)
            end_date = data[-1][0]
            end = end_date
            end_date = hp_datetime_adapter.get_localize(end_date)
            if count is not None and len(entire_data) >= count:
                entire_data = entire_data[:count]
                break
        return entire_create_response(
            responses,
            last_raw,
            entire_data,
            kwargs,
        )

    def request_with_count(self, provide_data: Callable, count: int, start: str, end: str, **kwargs) -> Response:
        """Accumulate event pages until *count* events are gathered."""
        entire_data = []
        responses = []
        c = count  # remaining events still to fetch
        response_count = EVENTS_MAX_LIMIT
        last_raw = {}
        while c > 0 and response_count >= EVENTS_MAX_LIMIT:
            response = provide_data(count=count, start=start, end=end, **kwargs)
            responses.append(response)
            if not response.is_success:
                break
            raw = response.data.raw
            last_raw = raw
            if len(raw) == 0 or not raw.get("data"):
                break
            data = list(raw["data"])
            response_count = len(data)
            if response_count >= EVENTS_MAX_LIMIT:
                data = remove_last_date_elements(data)
            entire_data.extend(data)
            c -= len(data)
            count = c
            end_date = data[-1][0]
            end = end_date
        return entire_create_response(
            responses,
            last_raw,
            entire_data,
            kwargs,
        )

    async def request_with_count_async(
        self, provide_data: Callable, count: int, start: str, end: str, **kwargs
    ) -> Response:
        """Async counterpart of ``request_with_count``."""
        entire_data = []
        responses = []
        c = count
        response_count = EVENTS_MAX_LIMIT
        last_raw = {}
        while c > 0 and response_count >= EVENTS_MAX_LIMIT:
            response = await provide_data(count=count, start=start, end=end, **kwargs)
            responses.append(response)
            if not response.is_success:
                break
            raw = response.data.raw
            last_raw = raw
            if len(raw) == 0 or not raw.get("data"):
                break
            data = list(raw["data"])
            response_count = len(data)
            if response_count >= EVENTS_MAX_LIMIT:
                data = remove_last_date_elements(data)
            entire_data.extend(data)
            c -= len(data)
            count = c
            end_date = data[-1][0]
            end = end_date
        return entire_create_response(
            responses,
            last_raw,
            entire_data,
            kwargs,
        )

    def get_request_function(
        self,
        count: Optional[int] = None,
        start: Optional[str] = None,
        end: Optional[str] = None,
        **kwargs,
    ) -> Optional[Callable]:
        """Pick a pagination strategy based on the provided window/count."""
        request_function = None
        if start is not None and end is not None:
            request_function = self.request_with_dates
        elif count is not None and count > EVENTS_MAX_LIMIT:
            request_function = self.request_with_count
        return request_function

    def get_request_function_async(
        self,
        count: Optional[int] = None,
        start: Optional[str] = None,
        end: Optional[str] = None,
        **kwargs,
    ) -> Optional[Callable]:
        """Async counterpart of ``get_request_function``."""
        request_function = None
        if start is not None and end is not None:
            request_function = self.request_with_dates_async
        elif count is not None and count > EVENTS_MAX_LIMIT:
            request_function = self.request_with_count_async
        return request_function
# Registry mapping each paginatable content type to its entire-data provider.
entire_data_provider_by_content_type = {
    ContentType.HISTORICAL_PRICING_EVENTS: EventsEntireDataProvider(),
    ContentType.CUSTOM_INSTRUMENTS_EVENTS: EventsEntireDataProvider(),
    ContentType.HISTORICAL_PRICING_INTERDAY_SUMMARIES: SummariesEntireDataProvider(),
    ContentType.HISTORICAL_PRICING_INTRADAY_SUMMARIES: SummariesEntireDataProvider(),
    ContentType.CUSTOM_INSTRUMENTS_INTERDAY_SUMMARIES: SummariesEntireDataProvider(),
    ContentType.CUSTOM_INSTRUMENTS_INTRADAY_SUMMARIES: SummariesEntireDataProvider(),
}
class EntireDataFactory(ContentDataFactory):
    """Data factory for merged multi-page responses."""

    def get_dfbuilder(self, **__):
        # Merged responses always use the empty-dataframe builder.
        return build_empty_df


# Shared factory instance used when assembling merged responses.
multi_entire_data_factory = EntireDataFactory()
def get_entire_data_provider(content_type: ContentType) -> EntireDataProvider:
    """Return the entire-data provider registered for *content_type*.

    Raises ValueError for content types without a registered provider.
    """
    provider = entire_data_provider_by_content_type.get(content_type)
    if provider is None:
        raise ValueError(f"Cannot find entire data provider for {content_type}")
    return provider
def entire_create_response(responses: List["BaseResponse"], last_raw: dict, entire_data: list, kwargs) -> Response:
    """Merge a sequence of partial responses into one Response.

    The merged raw keeps the last page's structure with its "data" replaced by
    the accumulated rows; per-request metadata and errors are concatenated.
    """
    raw = dict(last_raw)
    raw["data"] = entire_data
    request_messages = []
    http_responses = []
    http_statuses = []
    http_headers = []
    errors = []
    # Success if any partial request succeeded.
    is_success = any(partial.is_success for partial in responses)
    for partial in responses:
        if partial.errors:
            errors.extend(partial.errors)
        request_messages.extend(partial.request_message)
        http_responses.extend(partial.http_response)
        http_statuses.extend(partial.http_status)
        http_headers.extend(partial.http_headers)
    return Response(
        is_success,
        request_messages,
        http_responses,
        http_headers,
        http_statuses,
        errors,
        closure=None,
        requests_count=len(responses),
        _data_factory=multi_entire_data_factory,
        _kwargs=kwargs,
        _raw=raw,
    )
from dataclasses import dataclass
from typing import List, Dict
import pandas as pd
from .._tools import convert_str_to_timestamp
from .._types import Strings, TimestampOrNaT
@dataclass
class _TransformedData:
    """Rows, column names and parsed timestamps ready for DataFrame assembly."""

    data: List[List]  # value rows, timestamp column removed, None -> pd.NA
    fields: Strings  # column names aligned with each row of `data`
    dates: List[TimestampOrNaT]  # one parsed timestamp per row
@dataclass
class _ParsedData:
    """Raw historical payload split into rows plus timestamp-column metadata."""

    data: List[List]  # raw rows, unchanged, including the timestamp column
    headers_names: Strings  # header names in payload order
    timestamp_idx: int  # index of the timestamp column within each row
    timestamp_name: str  # "DATE_TIME" or "DATE"
def _parse_raw(raw: dict) -> _ParsedData:
    """Split a historical raw payload into rows and timestamp-column metadata.

    The timestamp column is "DATE_TIME" when present, else "DATE".

    Raises
    ------
    ValueError
        When neither header exists. (Previously this surfaced as an opaque
        ``list.index(None)`` ValueError; the exception type is unchanged.)
    """
    headers_names = [header["name"] for header in raw["headers"]]
    if "DATE_TIME" in headers_names:
        timestamp_name = "DATE_TIME"
    elif "DATE" in headers_names:
        timestamp_name = "DATE"
    else:
        raise ValueError(f"No DATE_TIME or DATE column in headers: {headers_names}")
    timestamp_idx = headers_names.index(timestamp_name)
    return _ParsedData(raw["data"], headers_names, timestamp_idx, timestamp_name)
def transform_for_df_by_fields(raw: dict, fields: Strings) -> _TransformedData:
    """Project each row of *raw* onto the requested *fields*.

    Missing fields and None values become ``pd.NA``; the timestamp column is
    extracted separately into ``dates``.

    Perf: the original called ``headers_names.index(field)`` per row per field
    (O(rows * fields * columns)); the index map is now built once. First
    occurrence wins for duplicate headers, matching ``list.index``.
    """
    parsed = _parse_raw(raw)
    idx_by_name = {}
    for idx, name in enumerate(parsed.headers_names):
        idx_by_name.setdefault(name, idx)
    timestamp_idx = parsed.timestamp_idx
    data = []
    dates = []
    for row in parsed.data:
        dates.append(convert_str_to_timestamp(row[timestamp_idx]))
        newrow = []
        for field in fields:
            idx = idx_by_name.get(field)
            if idx is None:
                newrow.append(pd.NA)
            else:
                item = row[idx]
                newrow.append(pd.NA if item is None else item)
        data.append(newrow)
    return _TransformedData(data, fields, dates)
def transform_for_df_by_headers_names(raw: dict) -> _TransformedData:
    """Split *raw* rows into values and timestamps, keyed by the payload's own headers.

    None values become ``pd.NA``; the timestamp column is removed from both the
    rows and the returned header list (note: pops it from the parsed list in place).
    """
    parsed = _parse_raw(raw)
    headers_names = parsed.headers_names
    timestamp_idx = parsed.timestamp_idx
    timestamp_name = parsed.timestamp_name
    data = []
    dates = []
    for lst in parsed.data:
        newlst = []
        for item, hdr_name in zip(lst, headers_names):
            if timestamp_name == hdr_name:
                # Timestamp values go to the separate dates list, not the row.
                dates.append(convert_str_to_timestamp(item))
                continue
            newlst.append(pd.NA if item is None else item)
        data.append(newlst)
    # Drop the timestamp header so columns align with the trimmed rows.
    headers_names.pop(timestamp_idx)
    return _TransformedData(data, headers_names, dates)
def transform_to_dicts(raw: dict, fields: Strings, date_name: str) -> List[Dict]:
    """Build one dict per row: ``{date_name: raw_timestamp, field: value, ...}``.

    Field matching is case-insensitive (both sides casefolded); missing fields
    and None values become ``pd.NA``. The date value is the raw payload value,
    deliberately not parsed or NA-converted (unchanged behavior).

    Perf: the per-row/per-field ``list.index`` lookups are replaced by an index
    map built once (first occurrence wins, matching ``list.index``).
    """
    parsed = _parse_raw(raw)
    idx_by_name = {}
    for idx, header_name in enumerate(parsed.headers_names):
        idx_by_name.setdefault(header_name.casefold(), idx)
    timestamp_idx = parsed.timestamp_idx
    fields = [f.casefold() for f in fields]
    dicts = []
    for row in parsed.data:
        values = []
        for field in fields:
            idx = idx_by_name.get(field)
            if idx is None:
                values.append(pd.NA)
            else:
                item = row[idx]
                values.append(pd.NA if item is None else item)
        dicts.append({date_name: row[timestamp_idx], **dict(zip(fields, values))})
    return dicts
from enum import Enum
from typing import Union
from .._tools import make_enum_arg_parser
from .._base_enum import StrEnum
class DayIntervalType(Enum):
    """Whether an interval is shorter than a day (INTRA) or a day or longer (INTER)."""

    INTRA = 0
    INTER = 1
class Intervals(StrEnum):
"""
The list of interval types of the boundary is described below.
The supported values of intervals :
Time:
Backend will return complete N-minute summaries data.
When the request start and/or end does not at the N minutes boundary,
the response will be adjusted.
MINUTE - return complete 1-minute
ONE_MINUTE - return complete 1-minute
FIVE_MINUTES - return complete 5-minutes
TEN_MINUTES - return complete 10-minutes
THIRTY_MINUTES - return complete 30-minutes
SIXTY_MINUTES - return complete 60-minutes
ONE_HOUR - return complete 1-hour
HOURLY - return complete 1-hour
Days:
DAILY - This is end of day, daily data
ONE_DAY - This is end of day, daily data
SEVEN_DAYS - Weekly boundary based on the exchange's
week summarization definition
WEEKLY - Weekly boundary based on the exchange's
ONE_WEEK - Weekly boundary based on the exchange's
week summarization definition
MONTHLY - Monthly boundary based on calendar month
ONE_MONTH - Monthly boundary based on calendar month
THREE_MONTHS - Quarterly boundary based on calendar quarter
QUARTERLY - Quarterly boundary based on calendar quarter
TWELVE_MONTHS - Yearly boundary based on calendar year
YEARLY - Yearly boundary based on calendar year
ONE_YEAR - Yearly boundary based on calendar year
"""
MINUTE = "PT1M"
ONE_MINUTE = "PT1M"
FIVE_MINUTES = "PT5M"
TEN_MINUTES = "PT10M"
THIRTY_MINUTES = "PT30M"
SIXTY_MINUTES = "PT60M"
HOURLY = "PT1H"
ONE_HOUR = "PT1H"
DAILY = "P1D"
ONE_DAY = "P7D"
SEVEN_DAYS = "P7D"
WEEKLY = "P1W"
ONE_WEEK = "P1W"
MONTHLY = "P1M"
ONE_MONTH = "P1M"
THREE_MONTHS = "P3M"
QUARTERLY = "P3M"
TWELVE_MONTHS = "P12M"
YEARLY = "P1Y"
ONE_YEAR = "P1Y"
# All canonical interval members in declaration order (enum iteration skips
# value aliases). NOTE: the bare strings below are pseudo-docstrings for the
# assignments that FOLLOW them, not the ones they come after.
_ISO8601_INTERVALS = [k for k in Intervals]
"""['PT1M', 'PT5M', 'PT10M', 'PT30M', 'PT60M', 'PT1H']"""
# First six canonical members are the intraday (time-based) intervals.
_INTRADAY = _ISO8601_INTERVALS[:6]
"""['P1D', 'P7D', 'P1W', 'P1M', 'P3M', 'P12M', 'P1Y']"""
# Remaining members are the interday (day-or-longer) intervals.
_INTERDAY = _ISO8601_INTERVALS[6:]

# Parser accepting Intervals members or their (optionally lower-case) names.
interval_arg_parser = make_enum_arg_parser(Intervals, can_be_lower=True)
def get_day_interval_type(interval: Union[str, Intervals, DayIntervalType]) -> DayIntervalType:
    """Resolve *interval* to its coarse DayIntervalType category.

    Accepts an already-resolved DayIntervalType (returned as-is), an
    Intervals member, or any string understood by interval_arg_parser.

    Raises
    ------
    TypeError
        If the parsed interval is neither an intraday nor an interday code.
    """
    if isinstance(interval, DayIntervalType):
        return interval
    parsed = interval_arg_parser.get_str(interval)
    if parsed in _INTRADAY:
        day_interval_type = DayIntervalType.INTRA
    elif parsed in _INTERDAY:
        day_interval_type = DayIntervalType.INTER
    else:
        raise TypeError(f"Incorrect day interval, interval={parsed}.")
return day_interval_type | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/_intervals.py | 0.819171 | 0.464234 | _intervals.py | pypi |
from dataclasses import dataclass
from functools import partial
from typing import TYPE_CHECKING, List, Callable
import pandas as pd
from .._content_data import Data
from .._content_data_provider import ContentDataProvider
from .._content_response_factory import ContentResponseFactory
from .._error_parser import ErrorParser
from ..._tools import PRICING_DATETIME_PATTERN, ValueParamItem
from ..._tools._common import fields_arg_parser, universe_arg_parser, cached_property
from ..._tools._dataframe import convert_df_columns_to_datetime_re, convert_dtypes
from ...delivery._data._data_provider import (
ContentValidator,
RequestFactory,
ValidatorContainer,
)
from ...delivery._stream.stream_cache import StreamCache
if TYPE_CHECKING:
from ...delivery._data._data_provider import ParsedData
# ---------------------------------------------------------------------------
# Response factory
# ---------------------------------------------------------------------------
class PriceCache:
    """Read-only, mapping-like view over a dict of per-instrument caches."""

    def __init__(self, cache: dict):
        self._cache = cache

    def keys(self):
        return self._cache.keys()

    def values(self):
        return self._cache.values()

    def items(self):
        return self._cache.items()

    def __iter__(self):
        # Iteration yields the cached values, in key order.
        return PricingCacheIterator(self)

    def __getitem__(self, name):
        try:
            return self._cache[name]
        except KeyError:
            raise KeyError(f"{name} not in PriceCache") from None

    def __len__(self):
        return len(self._cache)

    def __str__(self):
        return str(self._cache)
class PricingCacheIterator:
    """Iterator over the values of a PriceCache, in key order."""

    def __init__(self, price_cache: "PriceCache"):
        self._price_cache = price_cache
        # Snapshot the key order once, so iteration is stable.
        self._universe = list(price_cache.keys())
        self._index = 0

    def __next__(self):
        if self._index >= len(self._universe):
            raise StopIteration()
        item = self._price_cache[self._universe[self._index]]
        self._index += 1
        return item
def create_price_cache(data: dict, fields) -> "PriceCache":
    """Build a PriceCache (instrument name -> StreamCache) from raw items.

    Items without a "Key" entry are skipped entirely.
    """
    cache = {}
    for item in data:
        key = item.get("Key")
        if not key:
            continue
        name = key.get("Name")
        cache[name] = StreamCache(
            name=name,
            fields=fields,
            service=key.get("Service"),
            status=item.get("State"),
            record=item,
        )
    return PriceCache(cache)
status_code_to_value = {"NotEntitled": "#N/P", "NotFound": "#N/F"}


def pricing_build_df(raw: List[dict], universe: list, fields: list, **kwargs) -> pd.DataFrame:
    """Build the pricing snapshot dataframe.

    Parameters
    ----------
    raw : List[dict]
        Raw message items, positionally aligned with ``universe``.
    universe : list
        Instrument names (RICs).
    fields : list
        Field names to extract; when empty, the union of all field names
        found in ``raw`` (first-seen order, de-duplicated) is used.
    **kwargs
        Accepted for interface compatibility; not used.

    Returns
    -------
    pd.DataFrame
        One row per instrument; status items are filled with a placeholder
        value ("#N/P" / "#N/F") across every field column.
    """
    if not fields:
        fields = list(dict.fromkeys(key for item in raw for key in item.get("Fields", {}).keys()))
    n = len(fields)
    rows = []
    for idx, item in enumerate(raw):
        name = universe[idx]
        if item["Type"] == "Status":
            fill = status_code_to_value.get(item["State"]["Code"])
            rows.append([name, *([fill] * n)])
        else:
            values = item["Fields"]
            row = [name]
            for field in fields:
                value = values.get(field)
                row.append(pd.NA if value is None else value)
            rows.append(row)
    frame = pd.DataFrame(data=rows, columns=["Instrument", *fields])
    # Normalize datetime-looking columns, then let pandas pick best dtypes.
    convert_df_columns_to_datetime_re(frame, PRICING_DATETIME_PATTERN)
    return convert_dtypes(frame)
@dataclass
class PricingData(Data):
    # Pricing payload wrapper exposing the raw response items as a PriceCache.
    @cached_property
    def prices(self):
        # Lazily build the per-instrument cache from the raw response items.
        # NOTE(review): create_price_cache's second parameter is named
        # "fields" but receives the whole kwargs dict here — confirm this
        # should not be self._kwargs.get("fields").
        return create_price_cache(self.raw, self._kwargs)
# ---------------------------------------------------------------------------
# Request factory
# ---------------------------------------------------------------------------
# Query-parameter serializers: "universe" and "fields" each accept a string
# or list of strings and are sent as comma-delimited query-string values.
pricing_query_params = [
    ValueParamItem("universe", function=partial(universe_arg_parser.get_str, delim=",")),
    ValueParamItem("fields", function=partial(fields_arg_parser.get_str, delim=",")),
]


class PricingRequestFactory(RequestFactory):
    """Request factory that contributes the pricing-specific query parameters."""

    @property
    def query_params_config(self):
        return pricing_query_params
# ---------------------------------------------------------------------------
# Content data validator
# ---------------------------------------------------------------------------
class PricingContentValidator(ContentValidator):
    """Validates parsed pricing responses."""

    @cached_property
    def validators(self) -> List[Callable[["ParsedData"], bool]]:
        # Single check: status_is_not_error (presumably provided by the
        # ContentValidator base class — not visible here).
        return [self.status_is_not_error]
# ---------------------------------------------------------------------------
# Data provider
# ---------------------------------------------------------------------------
# Wires the pricing request factory, response factory, error parser and
# content validator into one provider instance used by the pricing content.
pricing_data_provider = ContentDataProvider(
    request=PricingRequestFactory(),
    response=ContentResponseFactory(data_class=PricingData),
    parser=ErrorParser(),
    validator=ValidatorContainer(content_validator=PricingContentValidator()),
) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/pricing/_pricing_content_provider.py | 0.840357 | 0.187058 | _pricing_content_provider.py | pypi |
from typing import TYPE_CHECKING, Any, Callable, List, Optional, Union
from ..._content_type import ContentType
from ..._core.session import get_valid_session
from ..._tools import (
PRICING_DATETIME_PATTERN,
cached_property,
create_repr,
make_callback,
)
from ..._tools._dataframe import convert_df_columns_to_datetime_re
from ..._types import OptBool, OptStr, Strings
from ...delivery._stream.base_stream import StreamOpenWithUpdatesMixin
from .._universe_streams import UniverseStreamFacade, _UniverseStreams
if TYPE_CHECKING:
import pandas
from ... import OpenState
from ..._core.session import Session
from ...delivery.omm_stream import ContribResponse, ContribType
class PricingStream(UniverseStreamFacade):
    """Per-instrument item facade for pricing streams; adds nothing to the base."""

    pass
class Stream(StreamOpenWithUpdatesMixin):
    """
    Class used for requesting, processing and managing a set of
    streaming level 1 (MarketPrice domain) quotes and trades.
    Extended description of this class:
        The object automatically manages a set of streaming caches available for access
        at any time. Your application can then reach into this cache and pull out
        real-time fields by just calling a simple access method.
        The object also emits a number of different events, your application can
        listen to in order to be notified of the latest field values in real time.
        The object is iterable.
    Parameters
    ----------
    session : Session, optional
        Means default session would be used
    universe : str or list of str, optional
        The single/multiple instrument/s name (e.g. "EUR=" or ["EUR=", "CAD=", "UAH="])
    fields : list, optional
        Specifies the specific fields to be delivered when messages arrive
    service : str, optional
        Name of the streaming service publishing the instruments
    api: str, optional
        Specifies the data source. It can be updated/added using config file
    extended_params : dict, optional
        If necessary other parameters
    Examples
    --------
    >>> from refinitiv.data.content import pricing
    >>> definition = pricing.Definition("EUR=")
    >>> stream = definition.get_stream()
    """

    def __init__(
        self,
        session: "Session" = None,
        universe: Union[str, List[str]] = None,
        fields: Optional[list] = None,
        service: OptStr = None,
        api: OptStr = None,
        extended_params: Optional[dict] = None,
    ) -> None:
        # Fall back to the default session when none is supplied; remember
        # whether the caller provided one explicitly.
        self._session = get_valid_session(session)
        self._always_use_default_session = session is None
        self._universe = universe or []
        self._fields = fields
        self._service = service
        self._api = api
        self._extended_params = extended_params

    @cached_property
    def _stream(self) -> _UniverseStreams:
        # Lazily create the underlying multi-instrument stream; cached so the
        # same instance backs every facade call.
        return _UniverseStreams(
            content_type=ContentType.STREAMING_PRICING,
            item_facade_class=PricingStream,
            universe=self._universe,
            session=self._session,
            fields=self._fields,
            service=self._service,
            api=self._api,
            extended_params=self._extended_params,
        )
    def open(self, with_updates: bool = True) -> "OpenState":
        """
        Opens the streaming connection to the Pricing data, and sends corresponding requests for all requested
        instruments.
        It will be opened once all the requested instruments are received
        either on_refresh, on_status or other method.
        Then the pricing.stream can be used in order to retrieve data.
        Parameters
        ----------
        with_updates : bool, optional
            Boolean indicator of how to work with the stream. If True - all data will be received continuously. If
            False - only the data snapshot will be received.
        Returns
        -------
        OpenState
        Examples
        --------
        >>> from refinitiv.data.content import pricing
        >>> definition = pricing.Definition("EUR=")
        >>> stream = definition.get_stream()
        >>> stream.open()
        """
        # Delegate to the mixin, which drives the underlying _UniverseStreams.
        return super().open(with_updates=with_updates)

    def close(self) -> "OpenState":
        """
        Closes the streaming connection to the Pricing data.
        Returns
        -------
        OpenState
        Examples
        --------
        >>> from refinitiv.data.content import pricing
        >>> definition = pricing.Definition("EUR=")
        >>> stream = definition.get_stream()
        >>> stream.open()
        >>> stream.close()
        """
        return super().close()

    def _get_fields(self, universe: str, fields: Optional[list] = None) -> dict:
        """
        Returns a dict of the fields for a requested universe.
        Parameters
        ----------
        universe: str
            Name of the instrument (e.g. 'EUR=' ...).
        fields: list, optional
            The fields that are listed in the `fields` parameter.
        Returns
        -------
        dict
        Examples
        --------
        >>> from refinitiv.data.content import pricing
        >>> definition = pricing.Definition(["AIRP.PA", "AIR.PA", "ALSO.PA"])
        >>> stream = definition.get_stream()
        >>> fields = stream._get_fields("AIRP.PA", ["BID", "ASK"])
        >>> fields
        ... {'AIRP.PA': {'BID': 147.14, 'ASK': 147.16}}
        """
        # Keep every cached field when *fields* is None, otherwise filter.
        _fields = {
            universe: {key: value for key, value in self._stream[universe].items() if fields is None or key in fields}
        }
        return _fields

    def get_snapshot(
        self,
        universe: Optional[Union[str, Strings]] = None,
        fields: Optional[Union[str, Strings]] = None,
        convert: OptBool = True,
    ) -> "pandas.DataFrame":
        """
        Returns a snapshot of the instruments stored in the in-memory data cache of the stream. When the stream is
        opened, this data cache is kept up-to-date with the latest updates received from the platform.
        Parameters
        ----------
        universe: str, list of str, optional
            Single instrument or list of instruments.
        fields: str, list of str, optional
            Single field or list of fields to return.
        convert: bool, optional
            If True - force numeric conversion to all values.
        Returns
        -------
        pandas.DataFrame
            pandas.DataFrame content:
                - columns : instrument and field names
                - rows : instrument name and field values
        Raises
        ------
        Exception
            If request fails or if server returns an error
        ValueError
            If a parameter type or value is wrong
        Examples
        --------
        >>> from refinitiv.data.content import pricing
        >>> definition = pricing.Definition(
        ...     ["MSFT.O", "GOOG.O", "IBM.N"],
        ...     fields=["BID", "ASK", "OPEN_PRC"]
        ...)
        >>> stream = definition.get_stream()
        >>> data = stream.get_snapshot(["MSFT.O", "GOOG.O"], ["BID", "ASK"])
        >>> data
        ... "  Instrument  BID        ASK      "
        ... "0 MSFT.O      150.9000   150.9500 "
        ... "1 GOOG.O      1323.9000  1327.7900"
        """
        df = self._stream.get_snapshot(universe=universe, fields=fields, convert=convert)
        # Normalize datetime-looking columns to real datetimes before returning.
        convert_df_columns_to_datetime_re(df, PRICING_DATETIME_PATTERN)
        return df
    def on_refresh(self, func: Callable[[Any, str, "Stream"], Any]) -> "Stream":
        """
        Called every time the whole fields list of a requested instrument is received from the platform.
        Parameters
        ----------
        func : Callable
            Callable object to process the retrieved data.
        Returns
        -------
        current instance
        Examples
        --------
        >>> import datetime
        >>> from refinitiv.data.content import pricing
        >>> definition = pricing.Definition("EUR=")
        >>> stream = definition.get_stream()
        >>> def on_refresh(message, ric, stream):
        ...     current_time = datetime.datetime.now().time()
        ...     print("\t{} | Receive refresh [{}] : {}".format(current_time, ric, message)) # noqa
        >>> stream.on_refresh(on_refresh)
        >>> stream.open()
        """
        self._stream.on_refresh(make_callback(func))
        return self

    def on_update(self, func: Callable[[Any, str, "Stream"], Any]) -> "Stream":
        """
        Called when some fields of one of the requested instruments are updated.
        Parameters
        ----------
        func : Callable
            Callable object to process the retrieved data
        Returns
        -------
        current instance
        Examples
        --------
        >>> import datetime
        >>> from refinitiv.data.content import pricing
        >>> definition = pricing.Definition("EUR=")
        >>> stream = definition.get_stream()
        >>> def on_update(update, ric, stream):
        ...     current_time = datetime.datetime.now().time()
        ...     print("\t{} | Receive update [{}] : {}".format(current_time, ric, update)) # noqa
        >>> stream.on_update(on_update)
        >>> stream.open()
        """
        self._stream.on_update(make_callback(func))
        return self

    def on_status(self, func: Callable[[Any, str, "Stream"], Any]) -> "Stream":
        """
        Called when a status is received for one of the requested instruments.
        Parameters
        ----------
        func : Callable
            Callable object to process the retrieved data.
        Returns
        -------
        current instance
        Examples
        --------
        >>> from refinitiv.data.content import pricing
        >>> definition = pricing.Definition("EUR=")
        >>> stream = definition.get_stream()
        >>> def on_status(status, ric, stream):
        ...     print("\tReceive status [{}] : {}".format(ric, status))
        >>> stream.on_status(on_status)
        >>> stream.open()
        """
        self._stream.on_status(make_callback(func))
        return self

    def on_complete(self, func: Callable[["Stream"], Any]) -> "Stream":
        """
        Called after the requested instruments and fields are completely received. on_complete is only called once
        per stream opening.
        Parameters
        ----------
        func : Callable
            Callable object to process the retrieved data
        Returns
        -------
        current instance
        Examples
        --------
        >>> import datetime
        >>> from refinitiv.data.content import pricing
        >>> definition = pricing.Definition("EUR=")
        >>> stream = definition.get_stream()
        >>> def on_complete(stream):
        ...     current_time = datetime.datetime.now().time()
        ...     print("\t{} | Receive complete".format(current_time))
        >>> stream.on_complete(on_complete)
        >>> stream.open()
        """
        # Unlike the other hooks, func is registered without the make_callback
        # adapter — the complete event carries only the stream itself.
        self._stream.on_complete(func)
        return self

    def on_error(self, func: Callable) -> "Stream":
        """
        Registers a callback invoked when an error message is received
        for the stream.
        Parameters
        ----------
        func : Callable
            Callable object to process the error.
        Returns
        -------
        current instance
        """
        self._stream.on_error(make_callback(func))
        return self

    def on_ack(self, func: Callable) -> "Stream":
        """
        Called when an acknowledgement message is received in response to
        a contribution (see `contribute` / `contribute_async`).
        Parameters
        ----------
        func : Callable, optional
            Callable object to process the retrieved data.
        Returns
        -------
        Stream
            current instance
        Examples
        --------
        Prerequisite: The default session must be opened.
        >>> from refinitiv.data.content import pricing
        >>>
        >>> definition = pricing.Definition("EUR=")
        >>> stream = definition.get_stream()
        >>> def on_ack(ack_msg, ric, stream):
        ...     print(f"\tReceive ack [{ric}] : {ack_msg}")
        >>> stream.on_ack(on_ack)
        >>> stream.open()
        >>> result = stream.contribute("EUR=", {"BID": 1.12})
        """
        self._stream.on_ack(make_callback(func))
        return self
    def contribute(
        self,
        name: str,
        fields: dict,
        contrib_type: Union[str, "ContribType", None] = None,
        post_user_info: Optional[dict] = None,
    ) -> "ContribResponse":
        """
        Function to send OnStream contribution request.
        Parameters
        ----------
        name: string
            RIC to contribute to.
        fields: dict{field:value}
            Specify fields and values to contribute.
        contrib_type: Union[str, ContribType], optional
            Define the contribution type.
            Default: "Update"
        post_user_info: dict, optional
            PostUserInfo object represents information about the posting user.
            Address: string, required
                Dotted-decimal string representing the IP Address of the posting user.
            UserID: int, required
                Specifies the ID of the posting user
        Returns
        -------
        ContribResponse
        Examples
        --------
        >>> import refinitiv.data as rd
        >>>
        >>> pricing_stream = rd.content.pricing.Definition(
        ...     ["MSFT.O", "GOOG.O", "IBM.N"],
        ...     fields=["BID", "ASK", "OPEN_PRC"]
        ...).get_stream()
        >>> response = pricing_stream.contribute("MSFT.O", {"BID": 240.83})
        """
        # Synchronous delegate; see contribute_async for the awaitable form.
        return self._stream.contribute(name, fields, contrib_type, post_user_info)

    async def contribute_async(
        self,
        name: str,
        fields: dict,
        contrib_type: Union[str, "ContribType", None] = None,
        post_user_info: Optional[dict] = None,
    ) -> "ContribResponse":
        """
        Function to send asynchronous OnStream contribution request.
        Parameters
        ----------
        name: string
            RIC to contribute to.
        fields: dict{field:value}
            Specify fields and values to contribute.
        contrib_type: Union[str, ContribType], optional
            Define the contribution type.
            Default: "Update"
        post_user_info: dict, optional
            PostUserInfo object represents information about the posting user.
            Address: string, required
                Dotted-decimal string representing the IP Address of the posting user.
            UserID: int, required
                Specifies the ID of the posting user
        Returns
        -------
        ContribResponse
        Examples
        --------
        >>> import refinitiv.data as rd
        >>>
        >>> pricing_stream = rd.content.pricing.Definition(
        ...     ["MSFT.O", "GOOG.O", "IBM.N"],
        ...     fields=["BID", "ASK", "OPEN_PRC"]
        ...).get_stream()
        >>> response = await pricing_stream.contribute_async("MSFT.O", {"BID": 240.83})
        """
        return await self._stream.contribute_async(name, fields, contrib_type, post_user_info)
    def add_instruments(self, instruments) -> None:
        """
        Add instruments to the stream universe.
        Parameters
        ----------
        instruments: str, list of str, optional
            List of instruments to add.
        Examples
        --------
        >>> from refinitiv.data.content import pricing
        >>> definition = pricing.Definition(
        ...     ["MSFT.O", "GOOG.O", "IBM.N"],
        ...)
        >>> stream = definition.get_stream()
        >>> stream.add_instruments("VOD.L")
        """
        self._stream.add_instruments(instruments)

    def remove_instruments(self, instruments) -> None:
        """
        Remove instruments from the stream universe.
        Parameters
        ----------
        instruments: str, list of str, optional
            List of instruments to remove.
        Examples
        --------
        >>> from refinitiv.data.content import pricing
        >>> definition = pricing.Definition(
        ...     ["MSFT.O", "GOOG.O", "IBM.N"],
        ...)
        >>> stream = definition.get_stream()
        >>> stream.remove_instruments("GOOG.O")
        """
        self._stream.remove_instruments(instruments)

    def add_fields(self, fields) -> None:
        """
        Add fields to the fields list.
        Parameters
        ----------
        fields: str, list of str, optional
            List of fields to add.
        Examples
        --------
        >>> from refinitiv.data.content import pricing
        >>> definition = pricing.Definition(
        ...     ["MSFT.O", "GOOG.O", "IBM.N"],
        ...     fields=["BID", "ASK", "OPEN_PRC"]
        ...)
        >>> stream = definition.get_stream()
        >>> stream.add_fields("TRDPRC_1")
        """
        self._stream.add_fields(fields)

    def remove_fields(self, fields) -> None:
        """
        Remove fields from the fields list.
        Parameters
        ----------
        fields: str, list of str, optional
            List of fields to remove.
        Examples
        --------
        >>> from refinitiv.data.content import pricing
        >>> definition = pricing.Definition(
        ...     ["MSFT.O", "GOOG.O", "IBM.N"],
        ...     fields=["BID", "ASK", "OPEN_PRC"]
        ...)
        >>> stream = definition.get_stream()
        >>> stream.remove_fields("ASK")
        """
        self._stream.remove_fields(fields)
def __iter__(self):
return self._stream.__iter__()
def __getitem__(self, item) -> "PricingStream":
return self._stream.__getitem__(item)
def __len__(self) -> int:
return self._stream.__len__()
    def __repr__(self):
        # Delegate to the shared create_repr helper; shows the requested universe.
        return create_repr(
            self,
            class_name=self.__class__.__name__,
            content=f"{{name='{self._universe}'}}",
) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/pricing/_stream_facade.py | 0.929991 | 0.307293 | _stream_facade.py | pypi |
from typing import TYPE_CHECKING
from ._pricing_content_provider import PricingData
from ._stream_facade import Stream
from .._content_provider_layer import ContentUsageLoggerMixin
from ..._content_type import ContentType
from ..._core.session import Session
from ..._tools import create_repr, try_copy_to_list
from ..._tools._common import universe_arg_parser, fields_arg_parser
from ...delivery._data._data_provider import DataProviderLayer, BaseResponse
if TYPE_CHECKING:
from ..._types import OptStr, ExtendedParams, StrStrings, OptStrStrs
class Definition(
    ContentUsageLoggerMixin[BaseResponse[PricingData]],
    DataProviderLayer[BaseResponse[PricingData]],
):
    """
    Creates a definition of information about the specific Pricing data.
    Parameters
    ----------
    universe : str or list of str
        Single instrument or list of instruments.
    fields : str or list of str, optional
        Single field or list of fields to return.
    service : str, optional
        Name of the streaming service publishing the instruments.
    api: str, optional
        Specifies the data source for the further retrieval of data.
    extended_params : dict, optional
        Specifies the parameters that will be merged with the request.
    Examples
    --------
    >>> from refinitiv.data.content import pricing
    >>> definition = pricing.Definition("EUR=")
    >>> response = definition.get_data()
    """

    # Name under which usage of this definition is logged.
    _USAGE_CLS_NAME = "Pricing.PricingDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        fields: "OptStrStrs" = None,
        service: "OptStr" = None,
        api: "OptStr" = None,
        extended_params: "ExtendedParams" = None,
    ) -> None:
        # "universe"/"fields" supplied via extended_params override the
        # positional arguments (they are popped so they are not sent twice).
        extended_params = extended_params or {}
        universe = extended_params.pop("universe", universe)
        universe = try_copy_to_list(universe)
        universe = universe_arg_parser.get_list(universe)
        fields = extended_params.pop("fields", fields)
        fields = try_copy_to_list(fields)
        fields = fields_arg_parser.get_unique(fields or [])
        # NOTE(review): service/api are stored for get_stream() only and are
        # not forwarded to the data-provider layer below — confirm intended.
        super().__init__(
            data_type=ContentType.PRICING,
            universe=universe,
            fields=fields,
            extended_params=extended_params,
        )
        self._universe = universe
        self._fields = fields
        self._service = service
        self._api = api
        self._extended_params = extended_params

    def __repr__(self) -> str:
        return create_repr(
            self,
            content=f"{{name={self._universe}}}",
        )

    def get_stream(self, session: Session = None) -> Stream:
        """
        Creates and returns the pricing stream that allows you to get streaming data for previously defined instruments.
        Parameters
        ----------
        session : Session, optional
            Session object. If it's not passed the default session will be used.
        Returns
        -------
        pricing.Stream
        Examples
        --------
        >>> from refinitiv.data.content import pricing
        >>> definition = pricing.Definition("IBM")
        >>> stream = definition.get_stream()
        >>> stream.open()
        """
        return Stream(
            universe=self._universe,
            session=session,
            fields=self._fields,
            service=self._service,
            api=self._api,
            extended_params=self._extended_params,
) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/pricing/_definition.py | 0.894999 | 0.232169 | _definition.py | pypi |
disp_tmpl_to_num_summary_links = {
10: 0,
11: 0,
26: 12,
27: 1,
30: 1,
31: 1,
32: 0,
33: 0,
35: 1,
37: 0,
39: 1,
40: 1,
41: 0,
45: 1,
46: 0,
47: 1,
51: 0,
53: 0,
55: 0,
57: 0,
63: 0,
74: 1,
88: 0,
89: 0,
90: 8,
91: 1,
92: 13,
93: 1,
94: 1,
95: 0,
114: 2,
134: 0,
158: 2,
159: 1,
160: 1,
161: 1,
162: 3,
163: 0,
164: 0,
165: 0,
166: 1,
167: 0,
168: 0,
173: 0,
178: 0,
179: 0,
180: 1,
181: 0,
182: 0,
183: 0,
184: 0,
185: 0,
186: 4,
187: 1,
188: 0,
189: 0,
191: 1,
193: 0,
195: 0,
197: 1,
198: 1,
199: 1,
200: 2,
201: 2,
202: 0,
203: 0,
204: 0,
205: 2,
206: 0,
207: 7,
208: 0,
209: 1,
210: 0,
211: 0,
213: 1,
214: 0,
215: 2,
216: 2,
217: 1,
218: 1,
219: 0,
220: 1,
221: 0,
222: 0,
223: 0,
224: 0,
225: 0,
226: 0,
227: 0,
228: 0,
229: 1,
230: 0,
231: 1,
232: 1,
233: 3,
234: 2,
235: 1,
238: 1,
239: 0,
240: 1,
241: 1,
242: 1,
243: 0,
244: 0,
245: 0,
246: 7,
247: 7,
248: 7,
249: 3,
250: 0,
251: 0,
252: 0,
253: 0,
254: 1,
255: 0,
1001: 0,
1002: 0,
1003: 0,
1004: 0,
1005: 0,
1006: 0,
1007: 0,
1008: 0,
1010: 1,
1014: 1,
1015: 1,
1016: 1,
1018: 1,
1020: 0,
1021: 8,
1022: 2,
1023: 1,
1024: 1,
1025: 1,
1026: 7,
1027: 4,
1028: 3,
1029: 3,
1030: 5,
1031: 5,
1032: 2,
1033: 3,
1034: 3,
1035: 2,
1036: 7,
1037: 15,
1038: 15,
1039: 15,
1040: 14,
1046: 2,
1047: 1,
1048: 1,
1050: 3,
1051: 3,
1052: 2,
1053: 1,
1054: 7,
1055: 3,
1056: 3,
1057: 3,
1058: 16,
1061: 0,
1062: 0,
1063: 0,
1065: 0,
1067: 2,
1068: 2,
1069: 0,
1070: 0,
1072: 0,
1074: 0,
1075: 0,
1076: 2,
1077: 1,
1078: 1,
1079: 5,
1081: 0,
1082: 0,
1085: 0,
1086: 0,
1087: 0,
1088: 0,
1092: 0,
1093: 0,
1094: 1,
1095: 12,
1096: 8,
1097: 0,
1098: 6,
1099: 6,
1100: 6,
1101: 0,
1102: 0,
1103: 1,
1105: 12,
1107: 0,
1108: 2,
1116: 2,
1118: 0,
1119: 1,
1120: 1,
1121: 1,
1122: 2,
1125: 0,
1126: 3,
1127: 1,
1128: 2,
1129: 4,
1130: 15,
1131: 2,
1132: 1,
1133: 1,
1134: 1,
1135: 5,
1136: 4,
1137: 3,
1138: 2,
1139: 4,
1140: 3,
1141: 3,
1142: 4,
1143: 4,
1146: 0,
1148: 0,
1150: 0,
1151: 2,
1152: 2,
1153: 2,
1154: 2,
1155: 2,
1156: 2,
1157: 3,
1159: 1,
1160: 3,
1161: 3,
1164: 1,
1166: 0,
1167: 0,
1168: 8,
1169: 15,
1170: 4,
1171: 2,
1172: 0,
1174: 2,
1177: 0,
1178: 0,
1179: 0,
1180: 0,
1181: 0,
1182: 0,
1183: 0,
1184: 0,
1185: 0,
1186: 0,
1188: 0,
1190: 1,
1191: 3,
1192: 2,
1193: 0,
1194: 0,
1195: 0,
1196: 1,
1199: 0,
1200: 1,
1201: 1,
1202: 2,
1204: 0,
1207: 0,
1211: 2,
1212: 1,
1213: 1,
1214: 0,
1215: 0,
1216: 2,
1217: 3,
1218: 1,
1219: 1,
1220: 1,
1221: 15,
1222: 3,
1223: 4,
1224: 2,
1227: 0,
1228: 0,
1230: 3,
1232: 4,
1233: 0,
1234: 0,
1235: 0,
1236: 1,
1237: 1,
1238: 2,
1239: 2,
1241: 0,
1242: 16,
1243: 0,
1244: 0,
1245: 0,
1247: 1,
1248: 2,
1249: 2,
1250: 0,
1251: 2,
1252: 2,
1253: 2,
1254: 2,
1255: 0,
1258: 0,
1259: 0,
1260: 0,
1261: 6,
1263: 1,
1264: 1,
1265: 3,
1266: 7,
1267: 15,
1268: 15,
1269: 10,
1270: 1,
1271: 14,
1272: 5,
1274: 0,
1275: 0,
1277: 4,
1278: 4,
1279: 4,
1280: 4,
1281: 0,
1284: 15,
1285: 0,
1286: 5,
1287: 3,
1288: 4,
1289: 0,
1290: 0,
1291: 0,
1292: 5,
1293: 5,
1294: 7,
1295: 7,
1296: 0,
1297: 0,
1298: 3,
1300: 0,
1301: 0,
1303: 0,
1304: 0,
1306: 0,
1307: 0,
1308: 7,
1309: 0,
1311: 1,
1312: 0,
1313: 0,
1316: 0,
1318: 0,
1319: 1,
1320: 0,
1322: 0,
1323: 0,
1324: 0,
1325: 0,
1329: 0,
1330: 2,
1331: 3,
1332: 1,
1333: 3,
1334: 0,
1335: 0,
1336: 0,
1343: 2,
1344: 1,
1345: 1,
1348: 0,
1349: 2,
1350: 1,
1352: 1,
1353: 10,
1354: 10,
1355: 0,
1357: 1,
1358: 2,
1359: 0,
1360: 0,
1361: 0,
1362: 0,
1363: 12,
1364: 9,
1365: 9,
1368: 2,
1369: 1,
1370: 2,
1371: 2,
1372: 2,
1373: 2,
1374: 2,
1375: 2,
1376: 2,
1377: 2,
1378: 2,
1379: 2,
1380: 2,
1381: 0,
1382: 1,
1383: 0,
1384: 4,
1385: 5,
1386: 1,
1389: 0,
1390: 1,
1391: 1,
1393: 0,
1395: 0,
1396: 0,
1397: 0,
1398: 0,
1400: 0,
1401: 0,
1402: 0,
1404: 0,
1405: 0,
1406: 2,
1407: 0,
1408: 2,
1409: 0,
1414: 0,
1415: 15,
1416: 15,
1417: 2,
1418: 0,
1420: 1,
1421: 0,
1422: 0,
1423: 0,
1426: 3,
1427: 0,
1430: 0,
1431: 2,
1432: 2,
1433: 2,
1434: 2,
1435: 2,
1436: 2,
1437: 2,
1438: 2,
1439: 2,
1440: 2,
1441: 2,
1443: 2,
1444: 4,
1445: 4,
1446: 2,
1447: 2,
1448: 0,
1449: 1,
1450: 1,
1451: 1,
1452: 1,
1453: 0,
1454: 0,
1455: 15,
1456: 1,
1457: 3,
1459: 1,
1460: 6,
1461: 10,
1462: 2,
1463: 0,
1464: 2,
1465: 7,
1468: 1,
1469: 2,
1470: 0,
1472: 0,
1474: 0,
1475: 0,
1476: 0,
1482: 0,
1483: 1,
1484: 4,
1485: 4,
1486: 0,
1487: 0,
1488: 1,
1489: 0,
1491: 2,
1492: 1,
1493: 1,
1494: 1,
1495: 0,
1497: 0,
1498: 3,
1501: 4,
1502: 1,
1504: 1,
1505: 1,
1506: 1,
1510: 8,
1511: 0,
1512: 7,
1513: 0,
1514: 0,
1516: 9,
1519: 0,
1520: 0,
1521: 0,
1522: 10,
1523: 0,
1530: 1,
1531: 3,
1532: 0,
1533: 0,
1534: 0,
1535: 0,
1536: 1,
1537: 1,
1538: 1,
1539: 2,
1540: 1,
1541: 2,
1542: 2,
1543: 2,
1544: 14,
1545: 0,
1546: 10,
1547: 6,
1548: 8,
1549: 10,
1550: 1,
1551: 1,
1552: 2,
1553: 2,
1554: 0,
1555: 1,
1556: 1,
1557: 2,
1558: 2,
1559: 3,
1560: 3,
1562: 3,
1563: 0,
1564: 1,
1565: 0,
1566: 1,
1567: 2,
1569: 1,
1570: 2,
1571: 0,
1572: 1,
1573: 1,
1574: 1,
1575: 1,
1576: 1,
1577: 0,
1578: 2,
1579: 7,
1580: 2,
1581: 2,
1583: 0,
1586: 0,
1587: 0,
1590: 10,
1591: 10,
1593: 0,
1594: 0,
1595: 0,
1596: 0,
1597: 0,
1598: 1,
1599: 0,
1604: 1,
1605: 0,
1606: 0,
1608: 1,
1609: 1,
1610: 1,
1611: 6,
1612: 0,
1613: 1,
1614: 1,
1615: 16,
1617: 2,
1619: 0,
1622: 0,
1623: 1,
1624: 0,
1625: 1,
1627: 0,
1628: 1,
1629: 2,
1630: 7,
1632: 1,
1633: 1,
1634: 1,
1635: 4,
1636: 0,
1637: 0,
1644: 0,
1645: 0,
1647: 0,
1650: 2,
1652: 2,
1654: 2,
1655: 2,
1656: 5,
1657: 14,
1658: 5,
1659: 14,
1660: 9,
1661: 8,
1662: 0,
1663: 0,
1664: 0,
1665: 0,
1666: 0,
1668: 0,
1669: 4,
1670: 2,
1671: 2,
1672: 4,
1673: 4,
1675: 2,
1676: 2,
1677: 2,
1678: 2,
1679: 1,
1680: 1,
1683: 1,
1684: 3,
1685: 7,
1687: 1,
1688: 1,
1689: 2,
1690: 0,
1691: 1,
1692: 0,
1693: 2,
1694: 2,
1695: 2,
1699: 2,
1701: 0,
1702: 0,
1703: 0,
1704: 0,
1705: 0,
1706: 0,
1707: 0,
1715: 0,
1716: 0,
1718: 0,
1719: 0,
1720: 0,
1722: 0,
1723: 3,
1724: 3,
1727: 0,
1728: 1,
1729: 1,
1730: 0,
1731: 0,
1738: 0,
1741: 1,
1742: 1,
1744: 17,
1745: 2,
1746: 4,
1747: 4,
1748: 1,
1749: 1,
1750: 1,
1751: 6,
1752: 0,
1756: 1,
1758: 1,
1760: 0,
1761: 0,
1762: 0,
1763: 1,
1764: 0,
1765: 0,
1767: 1,
1768: 0,
1769: 1,
1770: 0,
1771: 0,
1772: 0,
1773: 0,
1774: 15,
1775: 1,
1778: 2,
1779: 1,
1780: 0,
1781: 0,
1784: 3,
1785: 2,
1786: 3,
1790: 2,
1792: 6,
1799: 1,
1800: 0,
1801: 0,
1802: 2,
1804: 0,
1805: 0,
1806: 0,
1807: 0,
1808: 0,
1809: 0,
1810: 0,
1811: 0,
1812: 0,
1813: 0,
1814: 0,
1815: 0,
1816: 0,
1817: 0,
1818: 0,
1821: 1,
1822: 1,
1823: 2,
1824: 10,
1825: 9,
1826: 0,
1827: 0,
1828: 0,
1829: 2,
1830: 1,
1831: 0,
1833: 0,
1834: 8,
1835: 2,
1840: 3,
1843: 0,
1846: 0,
1847: 7,
1848: 7,
1849: 7,
1850: 7,
1851: 7,
1854: 1,
1855: 1,
1856: 1,
1859: 1,
1861: 1,
1863: 0,
1864: 1,
1867: 0,
1869: 0,
1870: 0,
1871: 0,
1872: 0,
1873: 0,
1874: 0,
1875: 0,
1876: 8,
1878: 1,
1880: 2,
1884: 8,
1885: 9,
1886: 9,
1890: 0,
1893: 12,
1895: 7,
1896: 15,
1897: 6,
1898: 5,
1900: 2,
1901: 1,
1902: 1,
1903: 1,
1904: 1,
1906: 0,
1910: 2,
1911: 5,
1912: 5,
1913: 5,
1916: 0,
1917: 6,
1920: 0,
1922: 2,
1924: 0,
1927: 2,
1928: 2,
1929: 2,
1935: 0,
1936: 1,
1937: 0,
1938: 0,
1939: 0,
1940: 0,
1941: 0,
1942: 0,
1943: 0,
1945: 0,
1946: 1,
1947: 1,
1948: 5,
1951: 2,
1952: 3,
1953: 0,
1957: 0,
1958: 0,
1960: 6,
1961: 1,
1962: 17,
1966: 0,
1967: 0,
1968: 0,
1969: 0,
1971: 0,
1973: 0,
1974: 1,
1975: 5,
1979: 0,
1980: 0,
1981: 0,
1982: 0,
1984: 0,
1985: 0,
1986: 0,
1987: 0,
1988: 0,
1989: 0,
1990: 0,
1991: 0,
1992: 6,
1993: 5,
1994: 0,
1995: 6,
1997: 0,
1998: 1,
2000: 0,
2003: 1,
2005: 0,
2007: 1,
2008: 0,
2011: 2,
2012: 1,
2014: 9,
2015: 6,
2016: 0,
2017: 0,
2019: 4,
2020: 0,
2021: 1,
2022: 11,
2023: 0,
2024: 1,
2025: 1,
2026: 9,
2027: 10,
2028: 10,
2029: 17,
2030: 1,
2032: 1,
2033: 2,
2036: 1,
2037: 0,
2038: 1,
2040: 0,
2041: 1,
2043: 0,
2044: 0,
2046: 2,
2050: 1,
2051: 0,
2052: 6,
2053: 8,
2054: 10,
2055: 6,
2056: 8,
2057: 10,
2058: 1,
2059: 1,
2064: 2,
2068: 0,
2069: 0,
2082: 0,
2083: 1,
2084: 6,
2086: 1,
2088: 0,
2089: 0,
2093: 1,
2094: 0,
2095: 0,
2098: 0,
2099: 0,
2151: 0,
2154: 1,
2158: 1,
2159: 0,
2160: 0,
2161: 0,
2162: 0,
2163: 1,
2164: 8,
2166: 0,
2168: 0,
2169: 5,
2172: 6,
2175: 0,
2178: 0,
2179: 1,
2181: 0,
2188: 0,
2192: 3,
2193: 0,
2199: 6,
2202: 0,
2203: 0,
2204: 0,
2205: 0,
2206: 0,
2207: 0,
2208: 0,
2209: 0,
2210: 1,
2211: 1,
2213: 6,
2214: 4,
2216: 1,
2217: 3,
2227: 0,
2228: 0,
2232: 0,
2233: 0,
2234: 0,
2237: 1,
2238: 3,
2239: 2,
2245: 0,
2247: 0,
2250: 2,
2252: 1,
2255: 0,
2256: 0,
2259: 0,
2260: 0,
2261: 0,
2262: 0,
2263: 0,
2268: 3,
2269: 3,
2270: 1,
2275: 0,
2278: 0,
2279: 2,
2280: 2,
2281: 10,
2282: 1,
2283: 0,
2286: 0,
2288: 1,
2289: 1,
2291: 0,
2292: 0,
2293: 0,
2298: 0,
2299: 0,
2301: 1,
2302: 0,
2303: 0,
2304: 0,
2306: 0,
2307: 0,
2308: 0,
2309: 0,
2310: 3,
2313: 1,
2314: 1,
2316: 0,
2317: 1,
2321: 1,
2322: 1,
2323: 1,
2324: 1,
2326: 3,
2328: 0,
2331: 6,
2343: 0,
2353: 1,
2354: 1,
2355: 0,
2357: 0,
2363: 1,
2366: 1,
2367: 3,
2369: 2,
2371: 2,
2372: 1,
2373: 0,
2375: 0,
2376: 6,
2377: 6,
2378: 0,
2379: 0,
2381: 0,
2383: 0,
2385: 0,
2387: 0,
2388: 0,
2390: 1,
2391: 0,
2395: 0,
2396: 0,
2397: 1,
2398: 0,
2399: 0,
2400: 0,
2401: 0,
2403: 0,
2405: 2,
2406: 2,
2408: 2,
2409: 2,
2413: 0,
2415: 1,
2416: 1,
2425: 6,
2426: 6,
2427: 6,
2428: 6,
2429: 0,
2430: 1,
2431: 1,
2432: 0,
2433: 1,
2434: 2,
2435: 0,
2436: 0,
2440: 2,
2443: 0,
2447: 0,
2451: 3,
2452: 3,
2453: 7,
2456: 0,
2464: 0,
2465: 0,
2466: 0,
2469: 0,
2470: 2,
2471: 2,
2473: 2,
2474: 2,
2475: 0,
2478: 0,
2481: 3,
2482: 3,
2487: 3,
2488: 3,
2490: 0,
2495: 0,
2497: 3,
2498: 0,
2500: 0,
2502: 2,
2503: 2,
2505: 2,
2508: 2,
2509: 0,
2511: 0,
2512: 1,
2513: 1,
2514: 1,
2516: 0,
2517: 1,
2520: 0,
2522: 0,
2524: 1,
2528: 0,
2533: 0,
2535: 0,
2537: 3,
2538: 2,
2542: 3,
2543: 1,
2545: 0,
2546: 0,
2547: 1,
2548: 1,
2550: 3,
2555: 3,
2560: 2,
2562: 3,
2563: 0,
2564: 0,
2565: 0,
2566: 0,
2568: 3,
2569: 3,
2570: 3,
2573: 2,
2574: 2,
2576: 2,
2577: 2,
2578: 1,
2581: 0,
2583: 0,
2584: 0,
2585: 0,
2587: 0,
2588: 0,
2589: 0,
2590: 0,
2591: 0,
2593: 0,
2594: 0,
2595: 0,
2597: 2,
2598: 0,
2599: 0,
2653: 1,
2655: 1,
2656: 0,
2658: 1,
2662: 0,
2663: 0,
2664: 0,
2668: 1,
2670: 1,
2672: 0,
2674: 1,
2677: 1,
2679: 1,
2680: 1,
2682: 3,
2683: 3,
2684: 3,
2685: 3,
2691: 1,
2693: 1,
2694: 1,
2695: 1,
2702: 0,
2703: 9,
2704: 0,
2705: 0,
2707: 1,
2708: 1,
2709: 1,
2710: 1,
2711: 0,
2714: 0,
2717: 0,
2718: 2,
2719: 2,
2720: 2,
2721: 2,
2722: 2,
2723: 2,
2724: 0,
2725: 1,
2727: 2,
2728: 2,
2733: 0,
2738: 0,
2739: 0,
2742: 0,
2744: 3,
2745: 3,
2746: 0,
2748: 0,
2750: 0,
2751: 0,
2754: 3,
2755: 15,
2756: 0,
2757: 0,
2758: 0,
2759: 3,
2761: 1,
2763: 0,
2764: 0,
2774: 0,
2781: 0,
2786: 0,
2789: 15,
2791: 0,
2792: 0,
2796: 2,
2799: 1,
2800: 1,
2801: 3,
2802: 3,
2813: 2,
2814: 1,
2815: 1,
2816: 0,
2820: 0,
2822: 0,
2825: 0,
2830: 0,
2831: 0,
2832: 0,
2833: 0,
2834: 1,
2835: 0,
2836: 0,
2837: 0,
2838: 3,
2842: 1,
2846: 3,
2847: 1,
2848: 0,
2849: 0,
2850: 2,
2851: 0,
2852: 2,
2853: 0,
2854: 3,
2855: 0,
2858: 1,
2859: 6,
2860: 6,
2861: 3,
2863: 0,
2865: 0,
2867: 5,
2869: 1,
2877: 0,
2882: 1,
2883: 1,
2884: 1,
2885: 1,
2886: 1,
2887: 1,
2888: 1,
2889: 1,
2890: 1,
2891: 1,
2892: 1,
2893: 1,
2897: 1,
2898: 1,
2899: 1,
2900: 1,
2901: 1,
2902: 1,
2903: 1,
2904: 1,
2905: 1,
2906: 1,
2907: 1,
2908: 1,
2910: 0,
2911: 0,
2913: 3,
2914: 3,
2916: 2,
2917: 0,
2918: 1,
2919: 0,
2920: 1,
2932: 0,
2933: 0,
2935: 0,
2938: 0,
2939: 2,
2940: 0,
2942: 0,
2944: 0,
2945: 0,
2946: 2,
2948: 3,
2949: 1,
2951: 1,
2954: 0,
2955: 10,
2956: 1,
2959: 1,
2960: 6,
2961: 6,
2963: 1,
2964: 0,
2965: 3,
2968: 0,
2970: 0,
2971: 2,
2972: 2,
2973: 2,
2974: 2,
2975: 2,
2976: 2,
2980: 2,
2981: 1,
2982: 3,
2983: 16,
2984: 2,
2986: 1,
2988: 1,
2991: 0,
2992: 0,
2993: 3,
2994: 0,
2995: 0,
2996: 0,
2997: 0,
3004: 0,
3007: 0,
3011: 2,
3012: 0,
3015: 0,
3019: 0,
3020: 1,
3021: 0,
3022: 0,
3027: 0,
3028: 2,
3030: 0,
3031: 1,
3032: 0,
3033: 0,
3034: 0,
3035: 0,
3036: 0,
3038: 1,
3039: 2,
3041: 0,
3042: 0,
3044: 2,
3045: 2,
3047: 1,
3048: 0,
3050: 0,
3052: 2,
3055: 0,
3057: 2,
3059: 2,
3061: 3,
3062: 3,
3063: 2,
3065: 3,
3066: 3,
3068: 3,
3071: 2,
3073: 2,
3074: 4,
3076: 3,
3077: 0,
3081: 3,
3083: 2,
3085: 2,
3090: 1,
3092: 0,
3093: 2,
3094: 2,
3095: 2,
3096: 2,
3097: 2,
3098: 2,
3099: 2,
3100: 2,
3101: 2,
3105: 0,
3106: 0,
3107: 1,
3108: 1,
3109: 2,
3110: 1,
3112: 11,
3114: 0,
3117: 0,
3118: 6,
3120: 2,
3121: 1,
3122: 0,
3125: 1,
3126: 0,
3127: 2,
3129: 1,
3133: 0,
3136: 0,
3137: 0,
3138: 2,
3139: 2,
3140: 2,
3141: 2,
3145: 0,
3146: 0,
3148: 0,
3149: 0,
3150: 1,
3151: 0,
3152: 0,
3153: 0,
3154: 0,
3155: 0,
3156: 0,
3157: 0,
3158: 0,
3159: 0,
3160: 0,
3161: 0,
3162: 2,
3163: 2,
3164: 2,
3165: 2,
3166: 2,
3167: 2,
3169: 1,
3171: 2,
3175: 1,
3176: 0,
3177: 0,
3178: 0,
3179: 0,
3202: 0,
3203: 1,
3204: 1,
3210: 0,
3211: 0,
3212: 0,
3215: 0,
3216: 0,
3217: 0,
3218: 0,
3221: 0,
3222: 0,
3223: 0,
3224: 0,
3225: 0,
3226: 0,
3227: 0,
3229: 0,
3230: 0,
3231: 0,
3232: 8,
3233: 2,
3235: 1,
3236: 1,
3237: 0,
3239: 0,
3241: 0,
3244: 0,
3245: 0,
3247: 0,
3249: 1,
3251: 0,
3254: 0,
3256: 0,
3257: 0,
3259: 0,
3260: 0,
3261: 0,
3262: 1,
3264: 4,
3265: 6,
3267: 1,
3268: 0,
3269: 0,
3271: 2,
3272: 0,
3274: 0,
3275: 1,
3277: 2,
3278: 2,
3281: 3,
3283: 4,
3284: 3,
3287: 3,
3288: 3,
3289: 4,
3290: 7,
3292: 2,
3295: 0,
3296: 0,
3299: 1,
3300: 3,
3303: 0,
3305: 0,
3306: 2,
3308: 1,
3309: 0,
3310: 0,
3313: 0,
3314: 0,
3315: 0,
3316: 3,
3317: 2,
3318: 3,
3319: 13,
3321: 0,
3323: 0,
3324: 0,
3326: 1,
3328: 3,
3329: 3,
3331: 3,
3333: 2,
3334: 0,
3335: 0,
3336: 0,
3337: 0,
3338: 0,
3339: 0,
3340: 0,
3341: 0,
3342: 0,
3343: 0,
3344: 0,
3345: 0,
3346: 0,
3348: 0,
3349: 0,
3350: 0,
3352: 2,
3354: 1,
3355: 1,
3365: 2,
3372: 1,
3373: 0,
3374: 0,
3377: 3,
3378: 3,
3380: 5,
3381: 0,
3382: 2,
3385: 0,
3388: 1,
3390: 0,
3392: 0,
3393: 0,
3394: 2,
3395: 2,
3397: 0,
3399: 0,
3400: 1,
3402: 0,
3406: 3,
3410: 0,
3411: 8,
3412: 0,
3413: 17,
3416: 1,
3417: 0,
3418: 1,
3420: 2,
3422: 1,
3425: 3,
3427: 2,
3428: 2,
3429: 0,
3430: 1,
3441: 0,
3442: 0,
3443: 0,
3444: 0,
3445: 1,
3451: 1,
3452: 1,
3453: 2,
3454: 8,
3455: 7,
3456: 1,
3460: 2,
3463: 1,
3464: 0,
3465: 2,
3467: 7,
3469: 11,
3471: 2,
3475: 0,
3478: 3,
3482: 0,
3484: 1,
3486: 0,
3487: 0,
3489: 13,
3492: 0,
3494: 0,
3496: 0,
3497: 2,
3498: 0,
3500: 1,
3503: 0,
3507: 1,
3508: 1,
3512: 2,
3516: 2,
3517: 2,
3520: 1,
3524: 0,
3530: 1,
3532: 4,
3535: 0,
3536: 0,
3539: 0,
3542: 10,
3544: 0,
3546: 3,
3547: 0,
3548: 3,
3549: 3,
3550: 3,
3551: 2,
3552: 2,
3553: 4,
3554: 4,
3555: 3,
3558: 2,
3559: 2,
3563: 0,
3564: 0,
3566: 0,
3577: 0,
3579: 0,
3580: 0,
3585: 0,
3586: 3,
3587: 0,
3588: 1,
3593: 0,
3594: 0,
3595: 14,
3597: 2,
3598: 2,
3599: 2,
3602: 3,
3604: 2,
3607: 0,
3608: 1,
3609: 6,
3611: 0,
3612: 0,
3614: 3,
3615: 0,
3616: 0,
3618: 2,
3619: 0,
3624: 0,
3625: 0,
3626: 1,
3627: 0,
3628: 0,
3629: 3,
3630: 3,
3631: 3,
3632: 1,
3633: 3,
3634: 1,
3636: 0,
3637: 2,
3644: 0,
3646: 3,
3647: 1,
3648: 2,
3649: 0,
3650: 2,
3652: 0,
3653: 3,
3660: 1,
3663: 2,
3664: 2,
3665: 0,
3669: 1,
3674: 0,
3676: 0,
3680: 0,
3681: 0,
3684: 1,
3686: 0,
3688: 0,
3695: 0,
3696: 1,
3697: 0,
3703: 0,
3704: 2,
3707: 0,
3708: 1,
3714: 3,
3720: 1,
3721: 7,
3722: 1,
3723: 0,
3724: 0,
3725: 0,
3730: 2,
3731: 1,
3732: 7,
3733: 2,
3735: 1,
3736: 0,
3737: 0,
3738: 0,
3739: 0,
3741: 1,
3743: 1,
3745: 3,
3748: 0,
3750: 1,
3754: 0,
3756: 0,
3757: 0,
3761: 0,
3762: 0,
3763: 2,
3765: 0,
3766: 8,
3767: 1,
3769: 0,
3773: 0,
3777: 2,
3778: 2,
3781: 0,
3784: 2,
3788: 0,
3789: 3,
3792: 0,
3794: 2,
3796: 0,
3802: 0,
3803: 2,
3808: 0,
3809: 0,
3810: 0,
3811: 0,
3812: 5,
3819: 0,
3821: 1,
3822: 4,
3823: 1,
3824: 1,
3827: 0,
3828: 0,
3831: 0,
3832: 3,
3833: 1,
3834: 0,
3835: 3,
3836: 0,
3842: 0,
3843: 0,
3845: 0,
3847: 1,
3849: 3,
3852: 10,
3853: 1,
3854: 2,
3860: 0,
3863: 0,
3865: 0,
3868: 0,
3869: 0,
3870: 0,
3876: 0,
3882: 3,
3884: 0,
3885: 4,
3887: 2,
3888: 2,
3889: 1,
3890: 7,
3893: 0,
3895: 8,
3896: 8,
3897: 8,
3898: 1,
3899: 1,
3900: 5,
3901: 2,
3903: 0,
3907: 0,
3909: 1,
3913: 0,
3921: 0,
3922: 0,
3925: 0,
3929: 0,
3932: 1,
3933: 2,
3938: 2,
3940: 0,
3941: 2,
3942: 3,
3944: 0,
3955: 1,
3956: 1,
3957: 0,
3958: 0,
3959: 0,
3960: 0,
3961: 0,
3962: 0,
3963: 0,
3964: 0,
3965: 0,
3966: 0,
3967: 0,
3968: 0,
3969: 0,
3970: 0,
3971: 0,
3972: 0,
3973: 0,
3974: 0,
3975: 0,
3976: 0,
3977: 0,
3978: 0,
3979: 0,
3980: 0,
3981: 0,
3982: 0,
3983: 0,
3984: 0,
3985: 0,
3986: 0,
3987: 0,
3988: 0,
3989: 0,
3990: 0,
3991: 0,
3992: 0,
3994: 0,
3997: 0,
3998: 2,
4003: 1,
4007: 0,
4008: 0,
4011: 0,
4012: 0,
4014: 0,
4016: 0,
4020: 2,
4024: 0,
4025: 0,
4026: 4,
4027: 3,
4029: 0,
4032: 0,
4033: 0,
4034: 15,
4038: 2,
4039: 2,
4040: 0,
4041: 0,
4050: 3,
4051: 1,
4063: 0,
4067: 0,
4069: 0,
4071: 0,
4072: 1,
4073: 1,
4075: 0,
4077: 0,
4084: 5,
4085: 1,
4090: 0,
4093: 0,
4096: 0,
4099: 0,
4101: 1,
4102: 1,
4103: 0,
4104: 2,
4106: 1,
4107: 1,
4108: 1,
4113: 1,
4114: 0,
4116: 2,
4117: 2,
4118: 2,
4119: 2,
4120: 1,
4122: 0,
4127: 0,
4128: 0,
4129: 1,
4130: 2,
4136: 0,
4137: 2,
4140: 0,
4144: 0,
4145: 0,
4148: 2,
4150: 0,
4151: 0,
4156: 2,
4159: 0,
4160: 0,
4161: 0,
4162: 0,
4163: 0,
4164: 0,
4165: 8,
4166: 8,
4167: 3,
4168: 3,
4169: 8,
4170: 8,
4171: 3,
4172: 8,
4173: 3,
4174: 8,
4175: 3,
4176: 8,
4179: 0,
4180: 2,
4181: 3,
4182: 0,
4184: 2,
4187: 0,
4188: 2,
4191: 0,
4194: 0,
4197: 0,
4200: 0,
4204: 0,
4205: 0,
4206: 0,
4207: 0,
4208: 6,
4211: 1,
4212: 0,
4215: 0,
4223: 0,
4225: 0,
4230: 0,
4234: 16,
4237: 10,
4238: 10,
4240: 1,
4241: 10,
4244: 10,
4250: 0,
4253: 0,
4259: 1,
4261: 2,
4262: 2,
4263: 3,
4266: 1,
4268: 15,
4269: 8,
4270: 0,
4271: 2,
4273: 1,
4274: 1,
4276: 1,
4282: 0,
4283: 0,
4292: 0,
4295: 0,
4298: 0,
4303: 0,
4304: 0,
4306: 2,
4308: 1,
4309: 1,
4310: 1,
4311: 0,
4312: 2,
4313: 0,
4314: 8,
4315: 1,
4316: 0,
4318: 0,
4319: 0,
4322: 0,
4325: 0,
4326: 0,
4329: 1,
4334: 0,
4337: 1,
4338: 1,
4339: 1,
4342: 2,
4361: 0,
4362: 3,
4364: 3,
4365: 2,
4370: 0,
4371: 0,
4376: 5,
4377: 5,
4378: 5,
4379: 5,
4380: 5,
4381: 5,
4382: 5,
4383: 2,
4385: 2,
4388: 1,
4390: 2,
4391: 3,
4393: 0,
4395: 1,
4397: 1,
4398: 1,
4400: 3,
4403: 0,
4406: 0,
4409: 0,
4410: 0,
4411: 0,
4413: 3,
4416: 1,
4417: 1,
4418: 1,
4419: 1,
4423: 1,
4426: 1,
4429: 0,
4432: 0,
4433: 2,
4438: 0,
4439: 0,
4440: 0,
4441: 0,
4444: 0,
4448: 0,
4451: 0,
4453: 1,
4454: 3,
4456: 0,
4457: 0,
4458: 0,
4464: 1,
4471: 1,
4472: 1,
4473: 1,
4474: 1,
4475: 1,
4476: 1,
4477: 1,
4478: 1,
4479: 1,
4480: 1,
4481: 1,
4488: 0,
4489: 0,
4491: 2,
4493: 2,
4495: 0,
4499: 1,
4501: 12,
4502: 1,
4503: 1,
4504: 0,
4507: 1,
4508: 2,
4509: 3,
4514: 2,
4515: 0,
4518: 1,
4521: 0,
4524: 0,
4528: 9,
4529: 9,
4534: 0,
4536: 0,
4537: 0,
4538: 0,
4539: 0,
4540: 1,
4541: 2,
4543: 0,
4545: 2,
4547: 0,
4549: 0,
4550: 6,
4551: 4,
4552: 2,
4553: 2,
4554: 0,
4555: 1,
4556: 12,
4557: 10,
4558: 15,
4559: 12,
4561: 11,
4562: 0,
4563: 1,
4570: 3,
4571: 3,
4572: 1,
4573: 2,
4574: 7,
4575: 7,
4576: 4,
4577: 6,
4578: 3,
4579: 2,
4580: 2,
4581: 3,
4582: 0,
4584: 3,
4587: 1,
4591: 0,
4593: 1,
4594: 0,
4596: 0,
4599: 2,
4603: 0,
4611: 0,
4614: 0,
4623: 0,
4624: 6,
4627: 0,
4629: 0,
4631: 0,
4632: 1,
4635: 0,
4636: 0,
4637: 0,
4638: 0,
4647: 0,
4649: 0,
4653: 0,
4655: 0,
4669: 0,
4670: 0,
4671: 0,
4676: 1,
4683: 0,
4684: 0,
4688: 3,
4689: 2,
4690: 3,
4691: 3,
4693: 1,
4696: 0,
4698: 3,
4699: 4,
4700: 2,
4701: 3,
4704: 0,
4705: 1,
4709: 15,
4710: 15,
4711: 2,
4712: 2,
4713: 2,
4714: 2,
4721: 3,
4722: 3,
4723: 2,
4725: 1,
4727: 3,
4728: 0,
4730: 0,
4731: 2,
4732: 0,
4733: 1,
4734: 7,
4745: 0,
4757: 0,
4763: 0,
4764: 0,
4765: 0,
4768: 0,
4769: 0,
4770: 0,
4772: 0,
4773: 0,
4774: 2,
4775: 0,
4776: 0,
4777: 0,
4781: 0,
4787: 0,
4788: 0,
4789: 0,
4790: 1,
4792: 0,
4793: 0,
4794: 0,
4795: 0,
4801: 0,
4802: 0,
4803: 0,
4804: 0,
4811: 0,
4812: 4,
4813: 3,
4817: 0,
4822: 0,
4829: 0,
4833: 0,
4840: 0,
4841: 0,
4842: 0,
4843: 0,
4844: 0,
4845: 0,
4846: 0,
4847: 0,
4848: 0,
4849: 0,
4850: 0,
4851: 0,
4852: 0,
4854: 1,
4855: 0,
4856: 0,
4857: 0,
4858: 0,
4859: 1,
4860: 2,
4862: 4,
4864: 0,
4865: 3,
4866: 3,
4867: 0,
4868: 3,
4870: 0,
4871: 4,
4872: 1,
4873: 1,
4875: 0,
4876: 0,
4878: 0,
4883: 2,
4885: 0,
4886: 0,
4889: 0,
4890: 0,
4892: 0,
4894: 0,
4896: 9,
4898: 0,
4900: 0,
4901: 12,
4903: 2,
4904: 0,
4907: 0,
4912: 3,
4913: 10,
4915: 0,
4916: 1,
4918: 0,
4919: 1,
4921: 1,
4922: 0,
4923: 2,
4929: 12,
4931: 2,
4933: 2,
4939: 0,
4942: 2,
4943: 1,
4950: 0,
4953: 0,
4954: 2,
4956: 0,
4959: 2,
4963: 1,
4965: 0,
4966: 0,
4969: 0,
4971: 0,
4973: 0,
4975: 0,
4979: 1,
4980: 0,
4981: 0,
4983: 0,
4985: 0,
4986: 2,
4989: 1,
4990: 7,
4991: 0,
4992: 2,
4994: 3,
4995: 7,
4996: 5,
4997: 1,
5001: 3,
5002: 3,
5014: 0,
5016: 0,
5019: 0,
5020: 0,
5025: 2,
5026: 2,
5027: 0,
5028: 1,
5029: 1,
5030: 1,
5031: 1,
5032: 1,
5042: 3,
5048: 0,
5051: 0,
5053: 0,
5055: 0,
5059: 0,
5060: 0,
5067: 3,
5104: 2,
5107: 0,
5109: 1,
5111: 3,
5113: 1,
5115: 0,
5117: 1,
5120: 0,
5122: 6,
5124: 4,
5125: 2,
5126: 2,
5127: 15,
5131: 0,
5133: 0,
5135: 0,
5138: 2,
5147: 2,
5148: 0,
5151: 0,
5155: 0,
5167: 0,
5168: 3,
5175: 0,
5178: 0,
5179: 0,
5180: 0,
5181: 1,
5183: 0,
5184: 0,
5187: 0,
5190: 0,
5192: 0,
5193: 0,
5195: 1,
5198: 0,
5199: 0,
5200: 0,
5202: 0,
5205: 0,
5207: 1,
5208: 2,
5209: 2,
5210: 2,
5211: 0,
5212: 5,
5219: 0,
5222: 0,
5224: 2,
5225: 0,
5227: 0,
5230: 2,
5236: 2,
5237: 1,
5240: 2,
5242: 0,
5244: 0,
5246: 2,
5247: 2,
5248: 3,
5249: 3,
5250: 4,
5257: 1,
5259: 0,
5260: 2,
5262: 0,
5264: 0,
5265: 0,
5266: 0,
5267: 0,
5268: 0,
5269: 0,
5270: 2,
5272: 3,
5275: 2,
5277: 2,
5279: 2,
5281: 2,
5283: 2,
5285: 2,
5288: 3,
5289: 3,
5290: 0,
5291: 0,
5293: 0,
5294: 0,
5301: 0,
5302: 2,
5303: 2,
5304: 2,
5307: 0,
5309: 1,
5311: 0,
5312: 0,
5313: 0,
5316: 0,
5317: 6,
5318: 0,
5321: 0,
5322: 0,
5325: 0,
5331: 0,
5335: 0,
5336: 0,
5337: 0,
5338: 0,
5339: 0,
5340: 0,
5341: 0,
5344: 0,
5350: 0,
5351: 0,
5353: 1,
5354: 1,
5355: 1,
5356: 0,
5357: 0,
5358: 0,
5361: 0,
5363: 0,
5365: 0,
5367: 0,
5368: 2,
5369: 0,
5372: 1,
5374: 1,
5375: 0,
5376: 7,
5377: 1,
5379: 1,
5383: 0,
5384: 2,
5385: 2,
5386: 2,
5387: 2,
5388: 2,
5389: 2,
5390: 0,
5393: 8,
5395: 0,
5399: 2,
5400: 1,
5401: 1,
5410: 10,
5411: 1,
5413: 0,
5414: 0,
5416: 2,
5417: 2,
5422: 2,
5433: 9,
5445: 1,
5448: 10,
5454: 1,
5455: 1,
5456: 1,
5465: 7,
5466: 0,
5468: 2,
5475: 0,
5498: 0,
5499: 0,
5501: 0,
5504: 0,
5505: 0,
5507: 3,
5508: 3,
5510: 0,
5512: 0,
5514: 0,
5518: 0,
5522: 0,
5526: 0,
5528: 0,
5529: 0,
5531: 1,
5532: 1,
5533: 1,
5534: 1,
5537: 0,
5538: 0,
5539: 11,
5542: 0,
5543: 1,
5545: 2,
5546: 0,
5548: 0,
5552: 0,
5553: 0,
5554: 2,
5555: 10,
5556: 10,
5559: 3,
5566: 3,
5572: 0,
5575: 0,
5578: 0,
5581: 0,
5582: 0,
5583: 1,
5585: 1,
5586: 1,
5587: 1,
5590: 1,
5591: 2,
5592: 14,
5593: 3,
5595: 3,
5598: 0,
5599: 2,
5600: 0,
5601: 0,
5603: 0,
5604: 1,
5607: 4,
5608: 2,
5609: 0,
5610: 0,
5611: 0,
5612: 6,
5613: 0,
5614: 2,
5615: 4,
5616: 2,
5617: 2,
5618: 3,
5619: 3,
5620: 1,
5621: 1,
5623: 0,
5624: 0,
5625: 0,
5626: 0,
5627: 0,
5629: 0,
5631: 0,
5635: 2,
5637: 0,
5639: 0,
5644: 0,
5647: 0,
5650: 0,
5653: 0,
5655: 0,
5657: 1,
5658: 1,
5666: 3,
5667: 0,
5673: 0,
5674: 0,
5675: 0,
5678: 0,
5679: 2,
5680: 1,
5681: 1,
5685: 0,
5686: 0,
5691: 0,
5692: 1,
5693: 0,
5695: 1,
5698: 0,
5706: 0,
5708: 1,
5711: 1,
5712: 1,
5714: 3,
5715: 3,
5716: 3,
5717: 3,
5718: 3,
5719: 0,
5720: 0,
5725: 1,
5727: 2,
5730: 0,
5731: 1,
5732: 0,
5739: 0,
5740: 0,
5742: 0,
5748: 0,
5749: 0,
5751: 0,
5753: 0,
5754: 0,
5762: 0,
5764: 0,
5765: 0,
5766: 1,
5767: 0,
5771: 0,
5772: 0,
5773: 0,
5774: 0,
5775: 0,
5776: 0,
5777: 0,
5784: 0,
5786: 0,
5789: 0,
5790: 0,
5791: 0,
5792: 0,
5794: 0,
5799: 0,
5800: 11,
5802: 0,
5804: 0,
5805: 0,
5810: 0,
5811: 0,
5812: 1,
5816: 0,
5818: 0,
5819: 0,
5820: 1,
5821: 3,
5822: 2,
5823: 1,
5824: 1,
5825: 1,
5833: 3,
5835: 3,
5837: 0,
5838: 3,
5840: 3,
5843: 0,
5844: 0,
5849: 0,
5851: 2,
5855: 1,
5856: 1,
5857: 1,
5858: 1,
5860: 2,
5861: 0,
5863: 0,
5864: 0,
5865: 0,
5866: 0,
5871: 1,
5873: 1,
5876: 1,
5877: 1,
5878: 0,
5882: 0,
5884: 2,
5885: 1,
5886: 1,
5887: 0,
5890: 0,
5891: 0,
5892: 0,
5894: 0,
5895: 0,
5896: 0,
5897: 0,
5898: 8,
5899: 8,
5900: 1,
5901: 2,
5902: 2,
5903: 15,
5904: 2,
5905: 2,
5906: 9,
5907: 2,
5908: 2,
5910: 0,
5912: 2,
5914: 1,
5923: 2,
5924: 2,
5925: 1,
5926: 1,
5927: 1,
5928: 1,
5932: 1,
5934: 0,
5936: 0,
5937: 0,
5938: 1,
5943: 0,
5944: 0,
5945: 0,
5955: 0,
5957: 0,
5959: 0,
5961: 0,
5963: 0,
5968: 0,
5969: 2,
5977: 2,
5978: 0,
5979: 0,
5982: 13,
5983: 13,
5984: 10,
5985: 10,
5986: 7,
5987: 7,
5988: 9,
5989: 9,
5990: 9,
5991: 9,
5998: 1,
6000: 0,
6001: 0,
6002: 0,
6003: 0,
6006: 0,
6007: 2,
6008: 0,
6009: 1,
6011: 0,
6012: 2,
6019: 2,
6021: 13,
6023: 13,
6026: 1,
6037: 1,
6038: 1,
6042: 1,
6043: 12,
6048: 2,
6050: 2,
6052: 2,
6055: 0,
6059: 1,
6060: 1,
6061: 0,
6062: 0,
6064: 0,
6065: 6,
6066: 6,
6070: 0,
6072: 3,
6074: 3,
6076: 3,
6077: 0,
6078: 9,
6079: 9,
6080: 9,
6081: 9,
6082: 9,
6083: 10,
6084: 10,
6085: 10,
6086: 10,
6087: 1,
6088: 1,
6089: 1,
6090: 1,
6091: 13,
6092: 10,
6093: 10,
6094: 10,
6095: 7,
6096: 7,
6097: 10,
6101: 0,
6103: 0,
6105: 0,
6106: 0,
6107: 0,
6108: 0,
6109: 0,
6115: 6,
6126: 0,
6127: 0,
6128: 0,
6130: 0,
6131: 0,
6133: 0,
6134: 0,
6136: 1,
6137: 1,
6140: 0,
6141: 0,
6142: 0,
6143: 0,
6144: 0,
6154: 0,
6155: 0,
6156: 0,
6157: 0,
6161: 1,
6162: 1,
6163: 0,
6164: 1,
6165: 0,
6166: 1,
6167: 12,
6170: 13,
6171: 2,
6172: 7,
6174: 3,
6175: 0,
6176: 0,
6183: 2,
6185: 2,
6186: 0,
6187: 3,
6188: 3,
6190: 3,
6191: 1,
6193: 0,
6194: 14,
6195: 2,
6196: 2,
6199: 0,
6200: 0,
6202: 2,
6204: 9,
6207: 2,
6208: 2,
6209: 2,
6210: 0,
6212: 4,
6214: 2,
6217: 0,
6219: 0,
6222: 0,
6224: 0,
6225: 0,
6230: 0,
6232: 0,
6235: 1,
6237: 1,
6239: 2,
6240: 7,
6241: 7,
6242: 0,
6244: 0,
6245: 1,
6247: 5,
6248: 5,
6251: 4,
6252: 1,
6253: 1,
6254: 4,
6256: 2,
6257: 2,
6258: 2,
6260: 10,
6261: 1,
6262: 1,
6263: 1,
6264: 11,
6265: 11,
6266: 17,
6270: 3,
6271: 0,
6272: 2,
6273: 16,
6274: 2,
6280: 0,
6283: 1,
6291: 12,
6296: 0,
6297: 0,
6299: 0,
6302: 0,
6311: 0,
6312: 0,
6319: 2,
6320: 1,
6321: 1,
6323: 1,
6324: 1,
6327: 2,
6329: 0,
6330: 1,
6331: 2,
6332: 2,
6333: 7,
6336: 0,
6337: 0,
6338: 0,
6339: 0,
6340: 0,
6341: 1,
6342: 0,
6344: 0,
6347: 2,
6349: 1,
6351: 0,
6352: 0,
6353: 0,
6354: 0,
6355: 0,
6356: 0,
6357: 0,
6359: 3,
6360: 1,
6364: 3,
6367: 2,
6371: 2,
6373: 0,
6375: 2,
6376: 1,
6378: 0,
6380: 0,
6384: 1,
6386: 2,
6388: 1,
6397: 0,
6399: 1,
6401: 0,
6402: 7,
6403: 0,
6406: 1,
6412: 0,
6418: 0,
6420: 0,
6421: 0,
6422: 0,
6423: 0,
6424: 0,
6425: 0,
6427: 0,
6428: 2,
6429: 1,
6430: 3,
6431: 0,
6432: 2,
6435: 1,
6438: 1,
6439: 0,
6441: 1,
6447: 0,
6448: 0,
6449: 0,
6457: 0,
6459: 0,
6461: 0,
6463: 0,
6465: 0,
6468: 0,
6476: 0,
6480: 2,
6491: 0,
6492: 0,
6493: 0,
6495: 3,
6504: 2,
6507: 1,
6509: 0,
6510: 1,
6514: 0,
6515: 2,
6518: 1,
6522: 2,
6523: 2,
6524: 2,
6525: 2,
6526: 2,
6529: 1,
6531: 2,
6536: 1,
6539: 1,
6544: 0,
6548: 0,
6549: 12,
6550: 1,
6553: 1,
6554: 0,
6555: 3,
6556: 0,
6564: 1,
6566: 2,
6568: 3,
6569: 0,
6570: 0,
6576: 1,
6578: 3,
6579: 2,
6583: 13,
6586: 2,
6587: 2,
6590: 0,
6594: 2,
6595: 0,
6596: 2,
6601: 2,
6602: 2,
6603: 2,
6604: 2,
6605: 2,
6606: 2,
6608: 3,
6610: 0,
6612: 1,
6613: 1,
6614: 1,
6616: 2,
6617: 3,
6620: 2,
6623: 2,
6625: 0,
6627: 2,
6628: 0,
6629: 0,
6630: 10,
6631: 0,
6632: 12,
6635: 0,
6636: 0,
6637: 0,
6643: 0,
6644: 0,
6646: 0,
6647: 2,
6649: 0,
6651: 0,
6652: 0,
6657: 0,
6662: 1,
6666: 0,
6667: 0,
6670: 1,
6672: 0,
6683: 7,
6684: 3,
6685: 0,
6686: 1,
6687: 2,
6688: 2,
6689: 1,
6690: 1,
6691: 2,
6692: 0,
6694: 3,
6695: 3,
6696: 3,
6697: 3,
6699: 2,
6701: 1,
6702: 3,
6708: 0,
6718: 1,
6721: 1,
6722: 1,
6724: 0,
6725: 0,
6727: 1,
6728: 0,
6729: 0,
6731: 0,
6738: 0,
6740: 0,
6743: 0,
6747: 0,
6750: 0,
6754: 0,
6755: 2,
6756: 2,
6758: 2,
6759: 2,
6760: 5,
6761: 1,
6763: 2,
6766: 4,
6769: 0,
6770: 3,
6772: 2,
6773: 2,
6774: 2,
6776: 2,
6782: 0,
6783: 0,
6784: 0,
6796: 1,
6800: 2,
6802: 0,
6804: 2,
6805: 0,
6807: 0,
6814: 1,
6816: 1,
6817: 2,
6820: 1,
6821: 0,
6823: 0,
6832: 1,
6833: 1,
6835: 5,
6836: 9,
6838: 0,
6847: 0,
6848: 3,
6849: 0,
6850: 4,
6853: 0,
6855: 0,
6858: 0,
6862: 1,
6863: 6,
6866: 1,
6867: 1,
6868: 0,
6869: 1,
6871: 0,
6876: 0,
6883: 0,
6884: 3,
6885: 2,
6887: 2,
6889: 2,
6892: 1,
6893: 1,
6894: 1,
6895: 1,
6898: 1,
6902: 1,
6907: 1,
6910: 0,
6912: 3,
6913: 2,
6915: 1,
6919: 3,
6920: 3,
6921: 3,
6923: 0,
6927: 0,
6929: 0,
6932: 0,
6933: 0,
6941: 0,
6944: 0,
6946: 0,
6948: 0,
6950: 0,
6952: 0,
6954: 0,
6956: 0,
6958: 0,
6959: 0,
6960: 0,
6961: 0,
6965: 0,
6966: 0,
6967: 0,
6973: 0,
6974: 0,
6975: 0,
6977: 0,
6978: 1,
6979: 1,
6980: 1,
6982: 9,
6983: 1,
6985: 1,
6986: 0,
6990: 0,
6991: 0,
6992: 0,
6995: 1,
7003: 3,
7004: 0,
7009: 0,
7012: 0,
7018: 0,
7019: 7,
7021: 2,
7023: 2,
7024: 3,
7029: 2,
7032: 1,
7033: 1,
7035: 2,
7041: 1,
7051: 2,
7053: 1,
7054: 1,
7055: 0,
7057: 2,
7058: 1,
7060: 2,
7062: 2,
7068: 0,
7069: 1,
7075: 1,
7078: 7,
7081: 0,
7082: 2,
7090: 0,
7097: 0,
7101: 0,
7102: 0,
7104: 0,
7107: 0,
7109: 0,
7111: 0,
7113: 0,
7115: 0,
7117: 0,
7119: 0,
7121: 0,
7123: 0,
7125: 0,
7127: 0,
7129: 0,
7134: 0,
7135: 1,
7137: 0,
7141: 0,
7164: 1,
7165: 1,
7167: 1,
7170: 0,
7175: 2,
7176: 0,
7178: 0,
7183: 1,
7184: 0,
7186: 0,
7188: 0,
7189: 0,
7218: 9,
7220: 9,
7221: 9,
7222: 9,
7223: 9,
7224: 9,
7225: 9,
7226: 9,
7228: 0,
7229: 2,
7232: 2,
7238: 0,
7239: 0,
7241: 0,
7243: 0,
7245: 0,
7246: 1,
7248: 0,
7251: 0,
7252: 0,
7253: 1,
7255: 1,
7267: 1,
7268: 1,
7269: 1,
7270: 1,
7271: 1,
7272: 1,
7273: 1,
7274: 1,
7275: 9,
7276: 9,
7277: 9,
7278: 9,
7279: 1,
7280: 9,
7281: 9,
7282: 9,
7285: 1,
7289: 3,
7291: 1,
7295: 0,
7297: 0,
7300: 0,
7301: 0,
7303: 0,
7308: 0,
7309: 0,
7314: 0,
7316: 0,
7319: 0,
7320: 9,
7321: 1,
7323: 0,
7332: 3,
7333: 0,
7349: 1,
7350: 1,
7351: 1,
7352: 1,
7354: 1,
7355: 7,
7360: 1,
7361: 1,
7362: 1,
7365: 1,
7367: 0,
7370: 0,
7371: 0,
7372: 0,
7373: 2,
7375: 0,
7379: 1,
7380: 1,
7381: 1,
7382: 1,
7383: 1,
7386: 1,
7388: 0,
7390: 0,
7393: 0,
7394: 0,
7395: 0,
7398: 0,
7402: 1,
7403: 1,
7405: 0,
7409: 1,
7410: 1,
7411: 1,
7414: 0,
7416: 0,
7419: 0,
7420: 2,
7421: 1,
7422: 1,
7425: 0,
7428: 1,
7429: 0,
7435: 0,
7437: 1,
7438: 0,
7441: 1,
7443: 0,
7451: 2,
7453: 1,
7456: 3,
7463: 1,
7468: 1,
7479: 1,
7480: 1,
7495: 2,
7496: 2,
7497: 1,
7501: 1,
7502: 1,
7503: 9,
7504: 0,
7520: 1,
7521: 1,
7522: 1,
7523: 1,
7524: 1,
7525: 1,
7526: 1,
7527: 1,
7538: 6,
7539: 1,
7546: 0,
7547: 0,
7548: 0,
7549: 0,
7550: 0,
7551: 0,
7553: 0,
7558: 1,
7559: 0,
7564: 2,
7565: 2,
7566: 1,
7568: 2,
7584: 0,
7591: 0,
7594: 0,
7599: 0,
7600: 1,
7603: 1,
7608: 0,
7611: 2,
7613: 2,
7614: 9,
7619: 2,
7620: 2,
7621: 2,
7622: 2,
7624: 0,
7627: 0,
7632: 0,
7633: 0,
7634: 0,
7639: 0,
7640: 1,
7641: 0,
7643: 2,
7645: 0,
7647: 0,
7650: 6,
7652: 0,
7654: 2,
7656: 2,
7657: 2,
7658: 3,
7664: 2,
7666: 2,
7668: 3,
7671: 0,
7673: 0,
7676: 0,
7682: 0,
7684: 0,
7685: 1,
7687: 0,
7698: 0,
7700: 0,
7703: 0,
7704: 0,
7710: 0,
7712: 6,
7715: 0,
7718: 1,
7719: 1,
7721: 1,
7722: 8,
7723: 0,
7727: 0,
7731: 0,
7734: 0,
7735: 0,
7736: 3,
7742: 0,
7743: 0,
7744: 0,
7747: 1,
7748: 0,
7749: 0,
7750: 0,
7751: 0,
7752: 0,
7753: 0,
7754: 3,
7755: 1,
7758: 0,
7760: 0,
7763: 0,
7765: 1,
7767: 0,
7770: 0,
7772: 3,
7776: 1,
7777: 1,
7779: 0,
7781: 0,
7783: 0,
7784: 0,
7785: 0,
7790: 0,
7791: 1,
7792: 0,
7793: 2,
7799: 1,
7803: 0,
7807: 0,
7808: 0,
7810: 0,
7811: 1,
7816: 2,
7817: 0,
7820: 1,
7823: 0,
7831: 1,
7833: 0,
7834: 0,
7835: 0,
7836: 0,
7839: 0,
7847: 2,
7848: 2,
7852: 1,
7853: 1,
7854: 1,
7855: 1,
7858: 1,
7861: 0,
7864: 0,
7866: 1,
7868: 0,
7869: 1,
7870: 0,
7873: 0,
7874: 0,
7875: 0,
7876: 0,
7877: 5,
7879: 1,
7881: 0,
7884: 0,
7886: 0,
7887: 1,
7888: 1,
7890: 9,
7893: 0,
7895: 1,
7896: 0,
7897: 0,
7898: 3,
7902: 6,
7903: 1,
7904: 1,
7907: 0,
7908: 0,
7911: 0,
7913: 1,
7914: 0,
7918: 0,
7919: 0,
7920: 2,
7921: 0,
7923: 1,
7925: 0,
7927: 0,
7928: 0,
7931: 0,
7933: 0,
7934: 1,
7935: 4,
7938: 0,
7940: 1,
7941: 1,
7942: 0,
7944: 1,
7948: 1,
7949: 1,
7950: 1,
7951: 0,
7954: 1,
7955: 2,
7956: 2,
7957: 2,
7961: 2,
7963: 1,
7965: 9,
7966: 9,
7970: 1,
7974: 1,
7975: 1,
7979: 1,
7981: 0,
7983: 0,
7988: 1,
7991: 7,
7992: 15,
7993: 3,
7994: 3,
7997: 6,
8007: 1,
8008: 0,
8011: 2,
8013: 0,
8014: 7,
8015: 2,
8019: 1,
8023: 2,
8024: 7,
8025: 0,
8027: 0,
8033: 1,
8034: 0,
8037: 1,
8040: 11,
8041: 0,
8046: 0,
8047: 1,
8048: 1,
8054: 2,
8057: 0,
8059: 1,
8060: 8,
8061: 8,
8067: 2,
8068: 2,
8070: 0,
8072: 0,
8074: 3,
8075: 2,
8079: 2,
8081: 2,
8085: 3,
8086: 3,
8091: 1,
8099: 0,
8103: 1,
8104: 15,
8105: 0,
8108: 1,
8113: 1,
8125: 1,
8127: 1,
8131: 2,
8135: 2,
8137: 3,
8138: 0,
8140: 0,
8145: 3,
8146: 2,
8147: 2,
8148: 8,
8149: 8,
8152: 1,
8157: 1,
8158: 0,
8159: 0,
8163: 0,
8165: 0,
8170: 0,
8171: 0,
8173: 0,
8175: 0,
8178: 0,
8180: 0,
8181: 0,
8183: 2,
8185: 1,
8194: 1,
8195: 0,
8197: 1,
8201: 0,
8202: 2,
8204: 2,
8206: 0,
8208: 0,
8211: 0,
8212: 0,
8215: 0,
8219: 0,
8220: 1,
8221: 0,
8225: 2,
8227: 0,
8229: 1,
8230: 8,
8231: 5,
8240: 7,
8244: 2,
8246: 2,
8247: 0,
8251: 2,
8252: 1,
8257: 1,
8260: 1,
8263: 0,
8266: 0,
8269: 0,
8272: 2,
8275: 0,
8277: 0,
8280: 0,
8281: 0,
8283: 0,
8284: 0,
8296: 0,
8300: 0,
8302: 0,
8305: 0,
8307: 0,
8310: 1,
8311: 0,
8315: 0,
8316: 1,
8317: 3,
8319: 2,
8320: 1,
8321: 0,
8322: 0,
8329: 0,
8330: 0,
8332: 0,
8341: 0,
8343: 2,
8348: 0,
8349: 1,
8350: 7,
8353: 2,
8354: 4,
8355: 3,
8358: 0,
8361: 1,
8364: 1,
8371: 4,
8373: 0,
8376: 0,
8377: 0,
8378: 3,
8379: 0,
8388: 0,
8391: 8,
8395: 1,
8396: 0,
8399: 1,
8403: 3,
8405: 3,
8413: 1,
8414: 0,
8419: 0,
8420: 8,
8421: 8,
8422: 8,
8423: 8,
8424: 1,
8426: 0,
8427: 0,
8429: 0,
8431: 1,
8432: 1,
8435: 2,
8436: 2,
8443: 1,
8444: 1,
8445: 1,
8447: 1,
8450: 0,
8451: 0,
8452: 0,
8459: 2,
8460: 0,
8467: 0,
8468: 2,
8471: 2,
8472: 0,
8479: 4,
8481: 4,
8485: 1,
8488: 1,
8489: 0,
8491: 2,
8493: 0,
8494: 0,
8495: 1,
8497: 1,
8499: 5,
8500: 2,
8501: 0,
8504: 1,
8505: 1,
8507: 0,
8511: 1,
8514: 1,
8520: 0,
8526: 0,
8529: 2,
8531: 0,
8535: 0,
8536: 0,
8537: 1,
8541: 3,
8545: 2,
8546: 3,
8549: 0,
8552: 0,
8553: 1,
8555: 1,
8559: 0,
8560: 1,
8561: 0,
8565: 0,
8567: 0,
8571: 0,
8573: 1,
8575: 1,
8576: 0,
8583: 0,
8584: 0,
8585: 2,
8588: 1,
8589: 1,
8590: 3,
8592: 7,
8593: 3,
8594: 1,
8599: 1,
8602: 0,
8605: 2,
8606: 0,
8607: 2,
8608: 0,
8609: 0,
8610: 2,
8621: 0,
8625: 1,
8632: 1,
8638: 0,
8639: 1,
8640: 0,
8645: 0,
8648: 2,
8649: 1,
8655: 0,
8659: 3,
8661: 2,
8663: 0,
8665: 1,
8666: 1,
8668: 1,
8670: 0,
8672: 1,
8673: 0,
8674: 0,
8675: 0,
8677: 0,
8679: 3,
8680: 3,
8681: 3,
8682: 3,
8684: 0,
8686: 3,
8687: 1,
8688: 1,
8689: 5,
8690: 6,
8692: 2,
8694: 0,
8695: 1,
8698: 2,
8701: 0,
8702: 1,
8706: 0,
8707: 0,
8709: 0,
8711: 0,
8712: 0,
8714: 0,
8737: 1,
8738: 1,
8742: 0,
8743: 0,
8745: 1,
8746: 0,
8747: 0,
8748: 0,
8759: 0,
8763: 1,
8765: 2,
8766: 2,
8767: 2,
8768: 0,
8769: 0,
8770: 0,
8771: 2,
8772: 0,
8775: 3,
8794: 0,
8796: 4,
8797: 7,
8803: 3,
8818: 0,
8841: 0,
8844: 1,
8848: 1,
8866: 1,
8869: 1,
8873: 0,
8874: 0,
8876: 1,
8878: 2,
8879: 2,
8883: 0,
8884: 1,
8885: 1,
8886: 1,
8890: 1,
8891: 1,
8892: 1,
8893: 3,
8894: 3,
8895: 4,
8896: 4,
8900: 4,
8902: 0,
8907: 0,
8909: 0,
8910: 1,
8917: 2,
8918: 1,
8919: 1,
8926: 1,
8933: 0,
8939: 2,
8942: 3,
8943: 3,
8944: 3,
8945: 3,
8950: 0,
8951: 4,
8954: 3,
8957: 0,
8959: 1,
8960: 0,
8961: 0,
8963: 1,
8966: 1,
8969: 1,
8972: 0,
8973: 0,
8974: 1,
8977: 4,
8979: 0,
8980: 0,
8982: 1,
8983: 1,
8986: 1,
8987: 1,
8988: 0,
8991: 1,
8995: 0,
8996: 0,
8997: 0,
8998: 0,
8999: 0,
9000: 0,
9017: 0,
9019: 0,
9022: 0,
9028: 3,
9029: 14,
9030: 2,
9031: 3,
9032: 0,
9034: 2,
9036: 0,
9037: 2,
9039: 0,
9040: 0,
9043: 3,
9044: 0,
9045: 0,
9046: 0,
9047: 0,
9048: 0,
9049: 0,
9050: 0,
9051: 0,
9052: 0,
9053: 0,
9054: 0,
9057: 0,
9058: 0,
9061: 1,
9068: 1,
9069: 1,
9070: 0,
9071: 1,
9073: 1,
9074: 1,
9075: 1,
9078: 13,
9079: 1,
9081: 1,
9082: 0,
9084: 0,
9086: 2,
9087: 1,
9090: 1,
9091: 4,
9092: 1,
9094: 0,
9098: 0,
9099: 3,
9102: 2,
9105: 0,
9106: 0,
9113: 0,
9114: 0,
9115: 0,
9116: 0,
9120: 0,
9121: 0,
9122: 0,
9123: 0,
9125: 0,
9131: 2,
9144: 0,
9146: 6,
9148: 3,
9149: 3,
9151: 4,
9154: 0,
9160: 0,
9161: 0,
9163: 0,
9167: 0,
9169: 0,
9176: 1,
9177: 0,
9178: 0,
9188: 0,
9192: 0,
9194: 0,
9195: 0,
9199: 2,
9202: 0,
9203: 0,
9209: 0,
9211: 1,
9212: 0,
9216: 17,
9217: 14,
9219: 1,
9221: 0,
9226: 0,
9227: 0,
9228: 0,
9229: 0,
9230: 1,
9232: 1,
9233: 1,
9235: 0,
9236: 2,
9238: 9,
9241: 1,
9242: 0,
9243: 2,
9247: 4,
9248: 2,
9249: 2,
9254: 0,
9257: 2,
9261: 0,
9262: 0,
9265: 1,
9268: 1,
9269: 1,
9270: 1,
9271: 0,
9273: 0,
9278: 1,
9279: 1,
9281: 0,
9283: 0,
9285: 1,
9289: 0,
9290: 0,
9291: 0,
9292: 2,
9293: 1,
9295: 0,
9296: 0,
9305: 0,
9307: 0,
9312: 2,
9313: 3,
9314: 3,
9315: 2,
9317: 1,
9319: 1,
9320: 1,
9321: 2,
9322: 1,
9323: 1,
9328: 1,
9330: 1,
9332: 1,
9344: 0,
9345: 0,
9346: 0,
9350: 0,
9352: 0,
9353: 2,
9355: 0,
9359: 1,
9362: 1,
9366: 3,
9367: 4,
9368: 2,
9369: 3,
9370: 2,
9371: 2,
9372: 2,
9374: 2,
9376: 1,
9377: 1,
9378: 2,
9379: 3,
9380: 2,
9381: 2,
9382: 3,
9383: 1,
9384: 0,
9385: 0,
9391: 1,
9392: 0,
9393: 1,
9394: 1,
9397: 1,
9398: 1,
9400: 0,
9403: 0,
9404: 0,
9406: 1,
9407: 2,
9408: 1,
9409: 9,
9410: 9,
9411: 9,
9414: 2,
9419: 0,
9423: 0,
9425: 2,
9426: 0,
9427: 1,
9428: 2,
9429: 1,
9431: 5,
9432: 7,
9437: 3,
9438: 1,
9439: 1,
9445: 1,
9446: 0,
9450: 0,
9451: 0,
9452: 1,
9453: 1,
9454: 0,
9458: 0,
9459: 0,
9461: 0,
9463: 1,
9467: 0,
9468: 1,
9469: 1,
9470: 1,
9473: 1,
9474: 1,
9475: 1,
9476: 1,
9477: 1,
9478: 0,
9479: 1,
9481: 0,
9483: 0,
9485: 1,
9487: 1,
9490: 0,
9492: 2,
9493: 0,
9494: 7,
9495: 2,
9496: 2,
9497: 1,
9507: 1,
9510: 1,
9511: 1,
9512: 0,
9513: 0,
9517: 1,
9520: 1,
9542: 2,
9543: 2,
9544: 2,
9545: 2,
9546: 0,
9547: 0,
9553: 2,
9554: 2,
9555: 1,
9556: 7,
9557: 3,
9558: 2,
9568: 0,
9570: 0,
9572: 0,
9574: 1,
9576: 1,
9578: 0,
9579: 1,
9581: 4,
9587: 0,
9588: 0,
9589: 0,
9590: 0,
9591: 0,
9598: 0,
9606: 1,
9607: 0,
9608: 0,
9612: 2,
9616: 3,
9618: 1,
9620: 4,
9622: 1,
9623: 1,
9624: 1,
9625: 1,
9628: 3,
9634: 2,
9645: 0,
9646: 1,
9647: 0,
9648: 0,
9649: 1,
9650: 0,
9651: 0,
9652: 0,
9653: 0,
9654: 0,
9655: 0,
9656: 0,
9657: 0,
9658: 0,
9661: 0,
9663: 0,
9665: 0,
9667: 1,
9670: 1,
9671: 1,
9674: 1,
9676: 1,
9683: 2,
9684: 3,
9686: 2,
9688: 3,
9695: 0,
9698: 1,
9701: 0,
9702: 0,
9703: 0,
9704: 0,
9705: 0,
9706: 1,
9707: 1,
9708: 0,
9710: 13,
9712: 0,
9713: 1,
9714: 0,
9715: 13,
9718: 0,
9731: 0,
9732: 0,
9733: 0,
9734: 0,
9735: 0,
9736: 2,
9741: 0,
9743: 1,
9744: 0,
9745: 0,
9746: 0,
9748: 1,
9749: 0,
9750: 1,
9751: 1,
9752: 0,
9753: 0,
9754: 0,
9755: 0,
9756: 3,
9757: 0,
9758: 0,
9759: 0,
9766: 2,
9768: 2,
9769: 3,
9770: 3,
9772: 1,
9784: 3,
9785: 0,
9790: 0,
9794: 1,
9795: 1,
9796: 0,
9799: 0,
9801: 0,
9803: 0,
9805: 0,
9809: 0,
9811: 3,
9813: 0,
9814: 0,
9815: 0,
30027: 0,
30028: 0,
}
import copy
from typing import Any, Callable, List, TYPE_CHECKING
from ._stream import StreamingChain
from ...._core.session import get_valid_session
from ...._tools import cached_property, create_repr
from ....delivery._stream.base_stream import StreamOpenWithUpdatesMixin
if TYPE_CHECKING:
from ...._types import ExtendedParams, OptBool, OptInt, OptStr
from ...._core.session import Session
class Stream(StreamOpenWithUpdatesMixin):
    """Facade over a streaming chain record.

    Requests a streaming chain, decodes it dynamically, and acts as a
    cache for each decoded part of the chain record.

    Parameters
    ----------
    name : str
        Single instrument name.
    session : Session, optional
        Session that defines the source the data is retrieved from.
    service : str, optional
        Name of the streaming service.
    skip_summary_links : bool, optional
        Whether summary links are skipped.
    skip_empty : bool, optional
        Whether empty data items are skipped.
    override_summary_links : int, optional
        Overridden number of summary links.
    extended_params : dict, optional
        Additional parameters, if necessary.

    Methods
    -------
    open(**kwargs)
        Open the Stream connection.
    close()
        Close the Stream connection and release its resources.

    Attributes
    ----------
    is_chain : bool
        True when the stream was decoded as a chain, False otherwise.
    constituents : list
        Constituents of the chain record (empty list when none).
    """

    def __init__(
        self,
        name: str,
        session: "Session" = None,
        service: "OptStr" = None,
        skip_summary_links: "OptBool" = True,
        skip_empty: "OptBool" = True,
        override_summary_links: "OptInt" = None,
        extended_params: "ExtendedParams" = None,
    ) -> None:
        # Fall back to the default session when the caller gave none.
        self._session = get_valid_session(session)
        self._always_use_default_session = session is None
        self._name = name
        self._service = service
        self._skip_summary_links = skip_summary_links
        self._skip_empty = skip_empty
        self._override_summary_links = override_summary_links
        self._extended_params = extended_params

    @cached_property
    def _stream(self) -> StreamingChain:
        # Built lazily on first access and reused afterwards (cached_property).
        return StreamingChain(
            name=self._name,
            session=self._session,
            service=self._service,
            skip_summary_links=self._skip_summary_links,
            skip_empty=self._skip_empty,
            override_summary_links=self._override_summary_links,
            extended_params=self._extended_params,
        )

    @property
    def name(self) -> str:
        """Name of the chain record."""
        return self._stream.name

    @property
    def is_chain(self) -> bool:
        """Whether the stream was decoded as a chain."""
        return self._stream.is_chain

    @property
    def num_summary_links(self) -> int:
        """Number of summary links in the chain record."""
        return self._stream.num_summary_links

    @property
    def summary_links(self) -> List[str]:
        """Summary links of the chain record."""
        return self._stream.summary_links

    @property
    def display_name(self) -> str:
        """Display name of the chain record."""
        return self._stream.display_name

    @property
    def constituents(self) -> List[str]:
        """Deep copy of the chain constituents, so callers cannot mutate the cache."""
        current = self._stream.get_constituents()
        return copy.deepcopy(current)

    def on_add(self, func: Callable[[int, str, "Stream"], Any]) -> "Stream":
        # Append this facade to the callback arguments before registering.
        wrapped = make_callback(self, func)
        self._stream.on_add(wrapped)
        return self

    def on_remove(self, func: Callable[[str, int, "Stream"], Any]) -> "Stream":
        wrapped = make_callback(self, func)
        self._stream.on_remove(wrapped)
        return self

    def on_update(self, func: Callable[[str, str, int, "Stream"], Any]) -> "Stream":
        wrapped = make_callback(self, func)
        self._stream.on_update(wrapped)
        return self

    def on_complete(self, func: Callable[[list, "Stream"], Any]) -> "Stream":
        wrapped = make_callback(self, func)
        self._stream.on_complete(wrapped)
        return self

    def on_error(self, func: Callable[[tuple, str, "Stream"], Any]) -> "Stream":
        # Error callbacks get their arguments reversed — see make_error_callback.
        wrapped = make_error_callback(self, func)
        self._stream.on_error(wrapped)
        return self

    def __repr__(self):
        return create_repr(
            self,
            class_name=self.__class__.__name__,
            content=f"{{name='{self._name}'}}",
        )
def make_callback(stream: Stream, func: Callable) -> Callable:
    """Return a wrapper that invokes *func* with its arguments followed by *stream*."""
    def callback(*call_args):
        func(*call_args, stream)
    return callback
def make_error_callback(stream: Stream, func: Callable) -> Callable:
    """Return a callback function with correct arguments order for error handling."""
    def callback(*args):
        # NOTE(review): the low-level error event's arguments are reversed
        # before forwarding — presumably to match the documented
        # (tuple, str, Stream) order of on_error callbacks; confirm upstream.
        args = reversed(args)
        func(*args, stream)
    return callback | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/pricing/chain/_stream_facade.py | 0.865253 | 0.233624 | _stream_facade.py | pypi
from typing import Optional, TYPE_CHECKING
from ._stream_facade import Stream
from ...._tools import create_repr, validate_types
if TYPE_CHECKING:
from ...._types import ExtendedParams
from ...._core.session import Session
class Definition(object):
    """
    Creates a definition of information about the specified chains to request and decode them dynamically.
    Parameters
    ----------
    name : str
        Single instrument chain name.
    service : str, optional
        Streaming service name.
    skip_summary_links : bool, optional
        If True - summary links will be skipped.
    skip_empty : bool, optional
        If True - empty data items will be skipped.
    override_summary_links : int, optional
        Number of summary links that can be overridden.
    extended_params : dict, optional
        Specifies the parameters that will be merged with the request.
    Examples
    --------
    >>> from refinitiv.data.content.pricing import chain
    >>> definition_chain = chain.Definition("0#.FTSE")
    """
    def __init__(
        self,
        name: str,
        service: Optional[str] = None,
        # option for chain constituents
        skip_summary_links: bool = True,
        skip_empty: bool = True,
        override_summary_links: Optional[int] = None,
        extended_params: "ExtendedParams" = None,
    ):
        # Fail fast on a wrongly typed override_summary_links; other parameters
        # are validated lazily by the Stream/StreamingChain machinery.
        validate_types(override_summary_links, [int, type(None)], "override_summary_links")
        self._name = name
        self._service = service
        self._skip_summary_links = skip_summary_links
        self._skip_empty = skip_empty
        self._override_summary_links = override_summary_links
        self._extended_params = extended_params
    def __repr__(self):
        return create_repr(
            self,
            content=f"{{name='{self._name}'}}",
        )
    def get_stream(
        self,
        session: "Session" = None,
    ) -> Stream:
        """
        Creates and returns the pricing chain object that allows you to get streaming data for previously defined
        chains.
        Parameters
        ----------
        session : Session, optional
            Session object. If it's not passed the default session will be used.
        Returns
        -------
        chain.Stream
        Examples
        -------
        Create a chain.Stream object
        >>> from refinitiv.data.content.pricing import chain
        >>> definition_chain = chain.Definition("0#.FTSE")
        >>> chain_stream = definition_chain.get_stream()
        Open the Stream connection
        >>> from refinitiv.data.content.pricing import chain
        >>> definition_chain = chain.Definition("0#.FTSE")
        >>> chain_stream = definition_chain.get_stream()
        >>> chain_stream.open()
        Closes the Stream connection
        >>> from refinitiv.data.content.pricing import chain
        >>> definition_chain = chain.Definition("0#.FTSE")
        >>> chain_stream = definition_chain.get_stream()
        >>> chain_stream.open()
        >>> chain_stream.close()
        Call constituents
        >>> from refinitiv.data.content.pricing import chain
        >>> definition_chain = chain.Definition("0#.FTSE")
        >>> chain_stream = definition_chain.get_stream()
        >>> chain_stream.open()
        >>> chain_stream.constituents
        Call property is_chain
        >>> from refinitiv.data.content.pricing import chain
        >>> definition_chain = chain.Definition("0#.FTSE")
        >>> chain_stream = definition_chain.get_stream()
        >>> chain_stream.open()
        >>> chain_stream.is_chain
        """
        # Each call builds a fresh Stream facade; the definition itself is
        # immutable and can be reused with different sessions.
        stream = Stream(
            name=self._name,
            session=session,
            service=self._service,
            skip_summary_links=self._skip_summary_links,
            skip_empty=self._skip_empty,
            override_summary_links=self._override_summary_links,
            extended_params=self._extended_params,
        )
        return stream | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/pricing/chain/_definition.py | 0.93918 | 0.256219 | _definition.py | pypi
from typing import Union, Any
import numpy as np
import pandas as pd
from numpy import iterable
from ._enums import Axis
from ._object_definition import ObjectDefinition
from ...delivery._data._data_provider import RequestFactory
from ...delivery.endpoint_request import RequestMethod
from ..._tools._common import ArgsParser, merge_dict_to_dict
# Maps an axis (or raw response field name) to the dtype applied when building
# surface/curve dataframes. Keys intentionally mix Axis enum members and plain
# strings, because some columns (e.g. "startDate") have no Axis counterpart.
types_by_axis = {
    Axis.DATE: "datetime64",
    "startDate": "datetime64",
    "endDate": "datetime64",
    Axis.DELTA: float,
    Axis.EXPIRY: float,
    Axis.MONEYNESS: float,
    Axis.STRIKE: float,
    Axis.TENOR: float,
    "discountFactor": float,
    "ratePercent": float,
}
def get_type_by_axis(axis):
    """Return the dtype registered for *axis*, raising ValueError if unknown."""
    values_type = types_by_axis.get(axis)
    if values_type:
        return values_type
    raise ValueError(f"Cannot find axis's values type for axis {axis}.")
def parse_value(value: Any) -> Union[float, int, np.datetime64]:
    """Coerce *value* into an int, numpy datetime64 or float (tried in that order).

    Strings are first tried as integers, then as dates (retrying with
    dayfirst=True for day-first formats), then as floats. Non-string input
    skips the int step and goes straight to the date/float attempts.

    Raises
    ------
    ValueError
        If none of the conversions succeed.
    """
    if isinstance(value, str):
        try:
            return int(value)
        except ValueError:
            pass
    try:
        return pd.to_datetime(value).to_numpy()
    except ValueError:
        try:
            # Retry for day-first date formats; BUG FIX: previously a successful
            # dayfirst parse fell through to float(), raising TypeError on the
            # resulting datetime64 instead of returning it.
            return pd.to_datetime(value, dayfirst=True).to_numpy()
        except ValueError:
            pass
    try:
        return float(value)
    except ValueError:
        raise ValueError(f"not valid format: {value}")
# Shared parser for axis values; x/y axes use the same coercion rules.
value_arg_parser = ArgsParser(parse_value)
x_arg_parser = value_arg_parser
y_arg_parser = value_arg_parser
# ---------------------------------------------------------------------------
# RequestFactory
# ---------------------------------------------------------------------------
def parse_universe(universe):
    """Convert universe definition object(s) into a list of JSON-ready dicts.

    A non-iterable input is treated as a single definition. Items exposing
    ``get_dict`` are serialized directly; wrapper layers without it are
    unwrapped through their ``_kwargs`` ("universe" / "extended_params"), with
    extended params merged into the serialized dict.
    """
    if not iterable(universe):
        universe = [universe]
    retval = []
    # Idiom fix: the previous enumerate() index was never used.
    for item in universe:
        extended_params = None
        if not hasattr(item, "get_dict"):
            # Definition layers wrap the real definition object in _kwargs.
            kwargs = item._kwargs
            item = kwargs.get("universe")
            extended_params = kwargs.get("extended_params")
        item_dict = item.get_dict()
        if extended_params:
            item_dict.update(extended_params)
        retval.append(item_dict)
    return retval
def parse_outputs(outputs):
    """Normalize *outputs* into a list of plain values (enum members -> .value)."""
    if not isinstance(outputs, list):
        outputs = [outputs]
    return [item.value if hasattr(item, "value") else item for item in outputs]
# Shared ArgsParser instances used by the request factories below.
universe_arg_parser = ArgsParser(parse_universe)
outputs_arg_parser = ArgsParser(parse_outputs)
class CurvesAndSurfacesRequestFactory(RequestFactory):
    """Builds POST bodies (universe/outputs) for curves-and-surfaces endpoints."""
    def extend_body_parameters(self, body_parameters, extended_params=None, **kwargs):
        """Merge extended_params into the body: whole-body for plural requests,
        otherwise into the first universe item."""
        if extended_params:
            if kwargs.get("__plural__") is True:
                body_parameters.update(extended_params)
            else:
                universes = body_parameters.get("universe", [{}])
                universes[0] = merge_dict_to_dict(universes[0], extended_params)
        return body_parameters
    def get_request_method(self, *, method=None, **kwargs):
        """Default to POST unless an explicit method is supplied."""
        return method if method else RequestMethod.POST
    def get_body_parameters(self, *args, universe=None, outputs=None, **kwargs):
        """Serialize universe (and optional outputs) into the request body."""
        body_parameters = {"universe": universe_arg_parser.get_list(universe)}
        if outputs:
            body_parameters["outputs"] = outputs_arg_parser.get_list(outputs)
        return body_parameters
class CrossCurrencyCurvesDefinitionsRequestFactory(RequestFactory):
    """Builds POST bodies for cross-currency curve definition endpoints."""
    def get_request_method(self, *, method=None, **kwargs):
        """Default to POST unless an explicit method is supplied."""
        return method if method else RequestMethod.POST
    def get_body_parameters(self, *args, request_items: ObjectDefinition, **kwargs):
        """Serialize a single ObjectDefinition; anything else yields an empty body."""
        if not isinstance(request_items, ObjectDefinition):
            return {}
        return request_items.get_dict()
    def extend_body_parameters(self, body_parameters, extended_params=None, **kwargs):
        """Return a merged copy when extended_params are given (original body untouched)."""
        if not extended_params:
            return body_parameters
        return {**body_parameters, **extended_params}
class DatesAndCalendarsRequestFactory(RequestFactory):
    """Builds POST bodies (a list of per-item dicts) for dates-and-calendars endpoints."""
    def get_request_method(self, *, method=None, **kwargs):
        """Default to POST unless an explicit method is supplied."""
        return method if method else RequestMethod.POST
    def get_body_parameters(self, *args, universe, **kwargs):
        """Serialize each request item; the body is a JSON array, not an object."""
        return [request_item.get_dict() for request_item in universe]
    def extend_body_parameters(self, body_parameters, extended_params=None, **kwargs):
        """Apply extended params positionally (list) or to every item (dict)."""
        if not extended_params:
            return body_parameters
        if isinstance(extended_params, list):
            for idx, extra in enumerate(extended_params):
                if extra:
                    body_parameters[idx].update(extra)
        else:
            for params in body_parameters:
                params.update(extended_params)
        return body_parameters
class DateScheduleRequestFactory(RequestFactory):
    # Request factory for the date-schedule endpoint: unlike
    # DatesAndCalendarsRequestFactory, `universe` is a single definition object
    # and the body is a single dict.
    def get_request_method(self, *, method=None, **kwargs):
        # Default to POST unless an explicit method is supplied.
        return method or RequestMethod.POST
    def get_body_parameters(self, *args, universe, **kwargs):
        return universe.get_dict()
    def extend_body_parameters(self, body_parameters, extended_params=None, **kwargs):
        # Extended params are merged into the body dict in place.
        if extended_params:
            body_parameters.update(extended_params)
        return body_parameters | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ipa/_content_provider.py | 0.737064 | 0.296082 | _content_provider.py | pypi
from dataclasses import dataclass
from itertools import zip_longest
from typing import Any, TYPE_CHECKING
import pandas as pd
from pandas import DataFrame
from ..._content_data import Data as ContentData
from ..._content_data_provider import ContentDataProvider
from ..._content_response_factory import ContentResponseFactory
from ..._error_parser import ErrorParser
from ...._tools import ArgsParser, fields_arg_parser, merge_dict_to_dict
from ...._tools._dataframe import convert_df_columns_to_datetime_by_idx, convert_dtypes
from ....delivery._data._data_provider import (
ContentValidator,
RequestFactory,
ValidatorContainer,
)
from ....delivery.endpoint_request import RequestMethod
if TYPE_CHECKING:
from ....delivery._data._data_provider import ParsedData
# --------------------------------------------------------------------------------------
# Content data validator
# --------------------------------------------------------------------------------------
class ContractsContentValidator(ContentValidator):
    """Accepts a financial-contracts payload unless every instrument row errored."""
    @classmethod
    def any_valid_content_data(cls, data: "ParsedData") -> bool:
        """Return True if at least one row in the response carries no error.

        Side effect: appends each row's ErrorCode/ErrorMessage to
        data.error_codes / data.error_messages.
        """
        content_data = data.content_data
        headers = content_data.get("headers", [])
        datas = content_data.get("data", [])
        err_codes = []
        err_msgs = []
        # zip(headers, *datas) transposes the row-oriented `datas`: each header
        # is paired with that column's values across all rows.
        for header, *data_items in zip(headers, *datas):
            header_name = header["name"]
            if "ErrorCode" == header_name:
                err_codes = data_items
            elif "ErrorMessage" == header_name:
                err_msgs = data_items
        counter = len(datas) or 1  # because datas can be empty list
        if err_codes or err_msgs:
            # Decrement once per row that reported either a code or a message;
            # reaching zero means every row errored -> payload invalid.
            for err_code, err_msg in zip_longest(err_codes, err_msgs, fillvalue=None):
                if err_code or err_msg:
                    counter -= 1
                    data.error_codes.append(err_code)
                    data.error_messages.append(err_msg)
        if counter == 0:
            return False
        return True
    def __init__(self) -> None:
        super().__init__()
        self.validators.append(self.any_valid_content_data)
# ---------------------------------------------------------------------------
# Content data
# ---------------------------------------------------------------------------
def convert_data_items_to_datetime(df: pd.DataFrame, headers: list) -> pd.DataFrame:
    # Convert every column whose header declares type "DateTime" or "Date" to
    # tz-naive UTC datetimes. Annotation fixed: `headers` is a list of header
    # dicts (was annotated `dict`, but the code iterates it as a sequence).
    columns_indexes = [index for index, header in enumerate(headers) if header.get("type", "") in ["DateTime", "Date"]]
    df = convert_df_columns_to_datetime_by_idx(df, columns_indexes, utc=True, delete_tz=True)
    return df
def financial_contracts_build_df(raw: dict, **kwargs) -> pd.DataFrame:
    """
    Convert "data" from raw response bond to dataframe format
    """
    data = raw.get("data")
    headers = raw.get("headers")
    if not data:
        return DataFrame()
    frame = DataFrame(data, columns=[header["name"] for header in headers])
    frame = convert_data_items_to_datetime(frame, headers)
    return convert_dtypes(frame)
@dataclass
class Data(ContentData):
    """
    This class is designed for storing and managing the response instrument data
    """
    # Raw payload sub-sections, extracted once in __post_init__.
    _analytics_headers: Any = None
    _analytics_data: Any = None
    _analytics_market_data: Any = None
    _analytics_statuses: Any = None
    def __post_init__(self):
        if self.raw:
            # get headers
            self._analytics_headers = self.raw.get("headers")
            # get data
            self._analytics_data = self.raw.get("data")
            # get marketData
            self._analytics_market_data = self.raw.get("marketData")
            # get statuses
            self._analytics_statuses = self.raw.get("statuses")
    @property
    def analytics_headers(self):
        # Column headers from the raw response ("headers" section).
        return self._analytics_headers
    @property
    def analytics_data(self):
        # Tabular instrument data ("data" section).
        return self._analytics_data
    @property
    def analytics_market_data(self):
        # Market data used for the calculation ("marketData" section).
        return self._analytics_market_data
    @property
    def analytics_statuses(self):
        # Per-instrument calculation statuses ("statuses" section).
        return self._analytics_statuses
    @property
    def marketdata_df(self):
        """
        Convert "marketData" from raw response bond to dataframe format
        """
        # NOTE(review): not implemented — always returns None despite the
        # docstring's promise of a dataframe; confirm whether conversion is
        # still planned or the property should be removed.
        return None
# ---------------------------------------------------------------------------
# Request factory
# ---------------------------------------------------------------------------
class ContractsRequestFactory(RequestFactory):
    # Builds POST bodies for the financial-contracts endpoint. Handles both the
    # plural form (Definitions: many instruments, shared pricing parameters)
    # and the singular form (one definition, per-item pricing parameters),
    # selected via the internal "__plural__" kwarg.
    def extend_body_parameters(self, body_parameters, extended_params=None, **kwargs):
        # Plural requests merge extended params into the whole body; singular
        # requests merge them into the first (only) universe item.
        if not extended_params:
            return body_parameters
        if kwargs.get("__plural__") is True:
            body_parameters.update(extended_params)
            return body_parameters
        universes = body_parameters.get("universe", [{}])
        universes[0] = merge_dict_to_dict(universes[0], extended_params)
        return body_parameters
    def get_request_method(self, *, method=None, **kwargs):
        return method or RequestMethod.POST
    def get_body_parameters(
        self,
        *args,
        universe=None,
        definition=None,
        fields=None,
        outputs=None,
        pricing_parameters=None,
        **kwargs,
    ):
        # `plural` must be read before the loop below: the loop rebinds the
        # local name `kwargs` to each item's _kwargs dict (see NOTE).
        plural = kwargs.get("__plural__")
        if plural is True:
            input_universe = universe
        else:
            input_universe = [definition]
        universe = []
        for item in input_universe:
            item_defn = item
            # For singular requests the top-level pricing parameters apply to
            # the item itself; plural requests attach them to the whole body.
            item_pricing_parameters = not plural and pricing_parameters
            item_extended_params = None
            if hasattr(item, "_kwargs"):
                # NOTE(review): this rebinds the function-level `kwargs`;
                # harmless today because `plural` was captured above, but
                # fragile if kwargs is read later in this method.
                kwargs = getattr(item, "_kwargs")
                item_defn = kwargs.get("definition")
                item_pricing_parameters = kwargs.get("pricing_parameters")
                item_extended_params = kwargs.get("extended_params")
            inst_defn_dict = item_defn.get_dict()
            if item_extended_params:
                inst_defn_dict.update(item_extended_params)
            data = {
                "instrumentType": item_defn.get_instrument_type(),
                "instrumentDefinition": inst_defn_dict,
            }
            if item_pricing_parameters:
                data["pricingParameters"] = item_pricing_parameters.get_dict()
            universe.append(data)
        body_parameters = {"universe": universe}
        if fields:
            fields = fields_arg_parser.get_list(fields)
            body_parameters["fields"] = fields
        if pricing_parameters and plural is True:
            body_parameters["pricingParameters"] = pricing_parameters.get_dict()
        return body_parameters
def get_data(definition, pricing_parameters=None):
    """Build the request payload dict for a single financial-contract definition.

    Wrapper layers exposing ``_kwargs`` are unwrapped first (definition,
    fields, pricing parameters and extended params are taken from there).
    """
    fields = None
    extended_params = None
    if hasattr(definition, "_kwargs"):
        inner = getattr(definition, "_kwargs")
        definition = inner.get("definition")
        fields = inner.get("fields")
        pricing_parameters = inner.get("pricing_parameters")
        extended_params = inner.get("extended_params")
    definition_dict = definition.get_dict()
    if extended_params:
        definition_dict.update(extended_params)
    data = {
        "instrumentType": definition.get_instrument_type(),
        "instrumentDefinition": definition_dict,
    }
    if fields:
        data["fields"] = fields_arg_parser.get_list(fields)
    if pricing_parameters:
        data["pricingParameters"] = pricing_parameters.get_dict()
    return data
def process_bond_instrument_code(code: Any) -> str:
    """Pass through *code* when it is a string or None; reject anything else."""
    if code is not None and not isinstance(code, str):
        raise ValueError(f"Invalid type of instrument_code, string is expected. type: {type(code)} is given")
    return code
bond_instrument_code_arg_parser = ArgsParser(process_bond_instrument_code)
# ---------------------------------------------------------------------------
# Data provider
# ---------------------------------------------------------------------------
# Ready-to-use provider wiring request building, response creation, payload
# validation and error parsing for the financial-contracts endpoint.
contracts_data_provider = ContentDataProvider(
    request=ContractsRequestFactory(),
    response=ContentResponseFactory(data_class=Data),
    validator=ValidatorContainer(content_validator=ContractsContentValidator()),
    parser=ErrorParser(),
) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ipa/financial_contracts/_contracts_data_provider.py | 0.716615 | 0.187096 | _contracts_data_provider.py | pypi
from typing import Optional, TYPE_CHECKING
from ._stream_facade import Stream
from .._ipa_content_provider import IPAContentProviderLayer
from ...._content_type import ContentType
from ...._tools import merge_dict_to_dict
if TYPE_CHECKING:
from ...._core.session import Session
class BaseDefinition(IPAContentProviderLayer):
    # Shared base for single-instrument financial-contract definition layers.
    def __init__(self, **kwargs) -> None:
        super().__init__(ContentType.CONTRACTS, **kwargs)
    def get_stream(
        self,
        session: Optional["Session"] = None,
    ) -> Stream:
        """
        Returns a streaming quantitative analytic service subscription
        Parameters
        ----------
        session : Session, optional
            Means the default session will be used
        Returns
        -------
        Stream
        Raises
        ------
        AttributeError
            If user didn't set default session.
        """
        # Rebuild the request-shaped dict (instrumentType/instrumentDefinition
        # [/pricingParameters]) from the stored kwargs, then hand it to the
        # streaming layer.
        definition = self._kwargs.get("definition")
        instrument_type = definition.get_instrument_type()
        definition_dict = definition.get_dict()
        pricing_parameters = self._kwargs.get("pricing_parameters")
        definition = {
            "instrumentType": instrument_type,
            "instrumentDefinition": definition_dict,
        }
        if pricing_parameters:
            definition["pricingParameters"] = pricing_parameters.get_dict()
        extended_params = self._kwargs.get("extended_params")
        if extended_params:
            definition = merge_dict_to_dict(definition, extended_params)
        stream = Stream(
            session=session,
            fields=self._kwargs.get("fields"),
            universe=definition,
        )
        return stream
    def __eq__(self, other):
        # NOTE(review): equality compares the wrapped instrument definition to
        # `other` (not another BaseDefinition); defining __eq__ without
        # __hash__ also makes instances unhashable — confirm both are intended.
        return self._kwargs.get("definition") == other
    def __repr__(self):
        repr_str = super().__repr__()
        new_str = f" {{name='{self._kwargs.get('definition')}'}}>"
        repr_str = repr_str.replace(">", new_str)
        return repr_str
class FCBaseDefinition(IPAContentProviderLayer):
    # Base for multi-instrument (plural) financial-contract definition layers;
    # unlike BaseDefinition it offers no get_stream().
    def __init__(self, **kwargs) -> None:
        super().__init__(ContentType.CONTRACTS, **kwargs)
    def __eq__(self, other):
        # NOTE(review): compares the wrapped definition to `other`; __hash__ is
        # implicitly disabled by defining __eq__ — confirm intended.
        definition = self._kwargs.get("definition")
        return definition == other
    def __repr__(self):
        repr_str = super().__repr__()
        new_str = f" {{name='{self._kwargs.get('definition')}'}}>"
        repr_str = repr_str.replace(">", new_str)
        return repr_str | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ipa/financial_contracts/_base_definition.py | 0.908964 | 0.150434 | _base_definition.py | pypi
from typing import Any, Callable, TYPE_CHECKING
from ._quantitative_data_stream import QuantitativeDataStream
from ...._core.session import get_valid_session
from ...._tools import cached_property, create_repr, make_callback
from ....delivery._stream.base_stream import StreamOpenMixin
if TYPE_CHECKING:
from ...._types import ExtendedParams
from pandas import DataFrame
from ...._core.session import Session
class Stream(StreamOpenMixin):
    """
    Open a streaming quantitative analytic service subscription.
    Parameters
    ----------
    universe: dict
    fields: list
    extended_params: dict
        Default: None
    Methods
    -------
    open()
        Open the QuantitativeDataStream connection
    close()
        Close the QuantitativeDataStream connection, releases resources
    get_snapshot()
        Get DataFrame with stream
    """
    def __init__(
        self,
        universe: dict,
        fields: list = None,
        session: "Session" = None,
        extended_params: "ExtendedParams" = None,
    ) -> None:
        self._universe = universe
        self._fields = fields
        # Resolve the session now; remember whether the default one was used.
        self._session = get_valid_session(session)
        self._always_use_default_session = session is None
        self._extended_params = extended_params
    @cached_property
    def _stream(self) -> QuantitativeDataStream:
        # Lazily built once; subsequent accesses reuse the same stream object.
        stream = QuantitativeDataStream(
            universe=self._universe,
            fields=self._fields,
            session=self._session,
            extended_params=self._extended_params,
        )
        return stream
    def get_snapshot(self) -> "DataFrame":
        """
        Returns DataFrame snapshot a streaming quantitative analytic service
        Returns
        -------
        pd.DataFrame
        """
        return self._stream.get_snapshot()
    def on_response(self, func: Callable[[list, list, "Stream"], Any]) -> "Stream":
        """
        This callback is called with the reference to the stream object,
        the instrument name and the instrument response
        Parameters
        ----------
        func : Callable
            Called when the stream has response
        Returns
        -------
        current instance
        """
        self._stream.on_response(make_fin_callback(func))
        return self
    def on_update(self, func: Callable[[list, list, "Stream"], Any]) -> "Stream":
        """
        This callback is called with the reference to the stream object,
        the instrument name and the instrument update
        Parameters
        ----------
        func : Callable
            Called when the stream has a new update
        Returns
        -------
        current instance
        """
        self._stream.on_update(make_fin_callback(func))
        return self
    def on_state(self, func: Callable[[list, "Stream"], Any]) -> "Stream":
        """
        This callback is called with the reference to the stream object,
        when the stream has new state
        Parameters
        ----------
        func : Callable
            Called when the stream has a new state
        Returns
        -------
        current instance
        """
        # NOTE(review): the public "state" callback is wired to the underlying
        # stream's on_alarm event — presumably alarms carry state changes;
        # confirm against QuantitativeDataStream.
        self._stream.on_alarm(make_callback(func))
        return self
    def __repr__(self):
        return create_repr(
            self,
            class_name=self.__class__.__name__,
        )
def make_fin_callback(func: Callable[["Stream", list, list], Any]) -> Callable:
    """Return a callback functions with correct arguments order."""
    def callback(stream, *args):
        # Low-level events deliver the stream first; user callbacks expect it last.
        func(*args, stream)
    return callback | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ipa/financial_contracts/_stream_facade.py | 0.933495 | 0.219923 | _stream_facade.py | pypi
from typing import Optional, List, TYPE_CHECKING, Any, Union
from ._base_definition import BaseDefinition
from ._base_definition import FCBaseDefinition
from ...._tools import create_repr, iterable, try_copy_to_list
if TYPE_CHECKING:
from .._object_definition import ObjectDefinition
from ...._types import OptStrStrs
def validate_universe(universe: Any) -> None:
    """Raise TypeError unless *universe* is a contract Definition or an
    iterable containing only contract Definitions (empty iterables pass)."""
    if iterable(universe):
        # all() short-circuits on the first non-Definition item, matching the
        # original early-break loop.
        is_valid = all(isinstance(item, BaseDefinition) for item in universe)
    else:
        is_valid = isinstance(universe, BaseDefinition)
    if not is_valid:
        raise TypeError(
            f"Provided type for parameter 'universe' is invalid. "
            f"Expected types: "
            f"[bond.Definition, cap_floor.Definition, cds.Definition, "
            f"cross.Definition, option.Definition, repo.Definition, "
            f"swap.Definition, swaption.Definition, term_deposit.Definition]"
        )
# A single financial-contract definition or a list of them.
DefnDefns = Union[BaseDefinition, List[BaseDefinition]]
class Definitions(FCBaseDefinition):
    """
    API endpoint for Financial Contract analytics,
    that returns calculations relevant to each contract type.
    Parameters
    ----------
    universe : list
        Array of Financial Contract definitions.
    fields: list of str, optional
        Array of requested fields. each requested field will represent
        a column of the tabular response. By default all relevant fields
        are returned.
    pricing_parameters : PricingParameters, optional
        Pricing parameters that are specific to the financial contracts
        defined in universe.
    Methods
    -------
    get_data(session=session, on_response=on_response, async_mode=None)
        Returns a response to the data platform
    get_data_async(session=session, on_response=on_response, async_mode=None)
        Returns a response to the async data platform
    Examples
    --------
    >>> import refinitiv.data.content.ipa.financial_contracts as rdf
    >>> option_definition = rdf.option.Definition(
    ...     instrument_code="FCHI560000L1.p",
    ...     underlying_type=rdf.option.UnderlyingType.ETI,
    ...     fields=[
    ...         "MarketValueInDealCcy",
    ...         "DeltaPercent",
    ...         "GammaPercent",
    ...         "RhoPercent",
    ...         "ThetaPercent",
    ...         "VegaPercent",
    ...         "ErrorCode",
    ...         "ErrorMessage",
    ...     ],
    ... )
    >>> bond_definition = rdf.bond.Definition(
    ...     issue_date="2002-02-28",
    ...     end_date="2032-02-28",
    ...     notional_ccy="USD",
    ...     interest_payment_frequency="Annual",
    ...     fixed_rate_percent=7,
    ...     interest_calculation_method=rdf.bond.DayCountBasis.DCB_ACTUAL_ACTUAL
    ... )
    >>> definition = rdf.Definitions(
    ...     [
    ...         bond_definition,
    ...         option_definition
    ...     ]
    ... )
    >>> response = definition.get_data()
    """
    def __init__(
        self,
        universe: "DefnDefns",
        fields: "OptStrStrs" = None,
        pricing_parameters: Optional["ObjectDefinition"] = None,
    ) -> None:
        # Validate before copying: every item must be a contract Definition.
        validate_universe(universe)
        universe = try_copy_to_list(universe)
        fields = try_copy_to_list(fields)
        if not isinstance(universe, list):
            universe = [universe]
        # __plural__ flags the request factory to build a multi-instrument body.
        super().__init__(
            universe=universe,
            fields=fields,
            pricing_parameters=pricing_parameters,
            __plural__=True,
        )
    def __repr__(self):
        return create_repr(self) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ipa/financial_contracts/_definition.py | 0.91869 | 0.196383 | _definition.py | pypi
from typing import Optional, Union
from .._instrument_definition import ObjectDefinition
from ..._enums import (
Direction,
DocClause,
Seniority,
)
class ProtectionLegDefinition(ObjectDefinition):
    """
    API endpoint for Financial Contract analytics,
    that returns calculations relevant to each contract type.
    Parameters
    ----------
    direction : Direction or str, optional
        The direction of the leg. Optional for a single leg instrument (like a bond), in that case default value
        is Received. It is mandatory for a multi-instrument leg instrument (like Swap
        or CDS leg).
    notional_ccy : str, optional
        The ISO code of the notional currency. Mandatory if instrument code or
        instrument style has not been defined. In case an instrument code/style has been
        defined, value may comes from the reference data.
    notional_amount : float, optional
        The notional amount of the leg at the period start date. Optional. By default
        1,000,000 is used.
    doc_clause : DocClause or str, optional
        The restructuring clause or credit event for Single Name Cds. Optional.
        By default the doc_clause of the reference_entity's
        Primary Ric is used.
    seniority : Seniority or str, optional
        The order of repayment in the case of a credit event for Single Name Cds. Optional. By default
        the seniority of the reference_entity's Primary Ric is used.
    index_factor : float, optional
        The factor that is applied to the notional in case a credit event happens in one
        of the constituents of the Cds Index. Optional. By default no factor (1)
        applies.
    index_series : int, optional
        The series of the Cds Index. Optional. By default the series of the BenchmarkRic
        is used.
    recovery_rate : float, optional
        The percentage of recovery in case of a credit event. Optional. By default the
        recovery_rate of the Cds built from reference_entity, seniority, doc_clause and
        notional_currency is used.
    recovery_rate_percent : float, optional
        The percentage of recovery in case of a credit event. Optional. By default the
        recovery_rate of the Cds built from reference_entity, seniority, doc_clause and
        notional_currency is used.
    reference_entity : str, optional
        The identifier of the reference entity, it can be:
        - for Single Name : a RedCode, an OrgId, a reference entity's RIC,
        - for Index : a RedCode, a ShortName, a CommonName. Mandatory.
    settlement_convention : str, optional
        The cashSettlementRule of the CDS. Optional. By default "3WD" (3 week days) is
        used.
    """
    # Each property below is a thin accessor over the underlying parameter
    # dict inherited from ObjectDefinition (_get/_set_parameter and the enum
    # variants), mapping snake_case attributes to camelCase payload keys.
    def __init__(
        self,
        *,
        direction: Union[Direction, str] = None,
        notional_ccy: Optional[str] = None,
        notional_amount: Optional[float] = None,
        doc_clause: Union[DocClause, str] = None,
        seniority: Union[Seniority, str] = None,
        index_factor: Optional[float] = None,
        index_series: Optional[int] = None,
        recovery_rate: Optional[float] = None,
        recovery_rate_percent: Optional[float] = None,
        reference_entity: Optional[str] = None,
        settlement_convention: Optional[str] = None,
    ) -> None:
        super().__init__()
        # Assignment goes through the property setters, which serialize each
        # value into the payload dict immediately.
        self.direction = direction
        self.notional_ccy = notional_ccy
        self.notional_amount = notional_amount
        self.doc_clause = doc_clause
        self.seniority = seniority
        self.index_factor = index_factor
        self.index_series = index_series
        self.recovery_rate = recovery_rate
        self.recovery_rate_percent = recovery_rate_percent
        self.reference_entity = reference_entity
        self.settlement_convention = settlement_convention
    @property
    def direction(self):
        """
        The direction of the leg. the possible values are:
        - 'Paid' (the cash flows of the leg are paid to the counterparty),
        - 'Received' (the cash flows of the leg are received from the counterparty).
        Optional for a single leg instrument (like a bond), in that case default value
        is Received. It is mandatory for a multi-instrument leg instrument (like Swap
        or CDS leg).
        :return: enum Direction
        """
        return self._get_enum_parameter(Direction, "direction")
    @direction.setter
    def direction(self, value):
        self._set_enum_parameter(Direction, "direction", value)
    @property
    def doc_clause(self):
        """
        The restructuring clause or credit event for Single Name Cds. The possible
        values are:
        - CumRestruct14,
        - ModifiedRestruct14,
        - ModModRestruct14,
        - ExRestruct14,
        - CumRestruct03,
        - ModifiedRestruct03,
        - ModModRestruct03,
        - ExRestruct03. Optional.
        By default the doc_clause of the reference_entity's Primary Ric is used.
        :return: enum DocClause
        """
        return self._get_enum_parameter(DocClause, "docClause")
    @doc_clause.setter
    def doc_clause(self, value):
        self._set_enum_parameter(DocClause, "docClause", value)
    @property
    def seniority(self):
        """
        The order of repayment in the case of a credit event for Single Name Cds. The
        possible values are:
        - Secured (Secured Debt (Corporate/Financial) or Domestic Currency Sovereign
          Debt (Government)),
        - SeniorUnsecured (Senior Unsecured Debt (Corporate/Financial) or Foreign
          Currency Sovereign Debt (Government)),
        - Subordinated (Subordinated or Lower Tier 2 Debt (Banks)),
        - JuniorSubordinated (Junior Subordinated or Upper Tier 2 Debt (Banks)),
        - Preference (Preference Shares or Tier 1 Capital (Banks)).
        Optional. By default the seniority of the reference_entity's
        Primary Ric is used.
        :return: enum Seniority
        """
        return self._get_enum_parameter(Seniority, "seniority")
    @seniority.setter
    def seniority(self, value):
        self._set_enum_parameter(Seniority, "seniority", value)
    @property
    def index_factor(self):
        """
        The factor that is applied to the notional in case a credit event happens in one
        of the constituents of the Cds Index. Optional. By default no factor (1)
        applies.
        :return: float
        """
        return self._get_parameter("indexFactor")
    @index_factor.setter
    def index_factor(self, value):
        self._set_parameter("indexFactor", value)
    @property
    def index_series(self):
        """
        The series of the Cds Index. Optional.
        By default the series of the BenchmarkRic is used.
        :return: int
        """
        return self._get_parameter("indexSeries")
    @index_series.setter
    def index_series(self, value):
        self._set_parameter("indexSeries", value)
    @property
    def notional_amount(self):
        """
        The notional amount of the leg at the period start date. Optional. By default
        1,000,000 is used.
        :return: float
        """
        return self._get_parameter("notionalAmount")
    @notional_amount.setter
    def notional_amount(self, value):
        self._set_parameter("notionalAmount", value)
    @property
    def notional_ccy(self):
        """
        The ISO code of the notional currency. Mandatory if instrument code or
        instrument style has not been defined. In case an instrument code/style has been
        defined, value may comes from the reference data.
        :return: str
        """
        return self._get_parameter("notionalCcy")
    @notional_ccy.setter
    def notional_ccy(self, value):
        self._set_parameter("notionalCcy", value)
    @property
    def recovery_rate(self):
        """
        The percentage of recovery in case of a credit event. Optional. By default the
        recovery_rate of the Cds built from reference_entity, seniority, doc_clause and
        notional_currency is used.
        :return: float
        """
        return self._get_parameter("recoveryRate")
    @recovery_rate.setter
    def recovery_rate(self, value):
        self._set_parameter("recoveryRate", value)
    @property
    def recovery_rate_percent(self):
        """
        The percentage of recovery in case of a credit event. Optional. By default the
        recovery_rate of the Cds built from reference_entity, seniority, doc_clause and
        notional_currency is used.
        :return: float
        """
        return self._get_parameter("recoveryRatePercent")
    @recovery_rate_percent.setter
    def recovery_rate_percent(self, value):
        self._set_parameter("recoveryRatePercent", value)
    @property
    def reference_entity(self):
        """
        The identifier of the reference entity, it can be:
        - for Single Name : a RedCode, an OrgId, a reference entity's RIC,
        - for Index : a RedCode, a ShortName, a CommonName. Mandatory.
        :return: str
        """
        return self._get_parameter("referenceEntity")
    @reference_entity.setter
    def reference_entity(self, value):
        self._set_parameter("referenceEntity", value)
    @property
    def settlement_convention(self):
        """
        The cashSettlementRule of the CDS. Optional. By default "3WD" (3 week days) is
        used.
        :return: str
        """
        return self._get_parameter("settlementConvention")
    @settlement_convention.setter
    def settlement_convention(self, value):
        self._set_parameter("settlementConvention", value) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ipa/financial_contracts/cds/_protection_leg_definition.py | 0.946126 | 0.620162 | _protection_leg_definition.py | pypi
from typing import Optional, Union
from ....._types import OptDateTime
from .._instrument_definition import InstrumentDefinition
from ..._enums import (
BusinessDayConvention,
CdsConvention,
)
from ._premium_leg_definition import PremiumLegDefinition
from ._protection_leg_definition import ProtectionLegDefinition
class CdsInstrumentDefinition(InstrumentDefinition):
    """
    Instrument definition for a Credit Default Swap (CDS), sent to the
    Financial Contract analytics API endpoint that returns calculations
    relevant to each contract type.

    Each snake_case attribute is serialized under a camelCase key in the
    request payload via the property setters below.

    Parameters
    ----------
    instrument_tag : str, optional
        User defined string to identify the instrument. It can be used to link output
        results to the instrument definition. Only alphabetic, numeric and '- _.#=@'
        characters are supported. Optional.
    instrument_code : str, optional
        A cds RIC that is used to retrieve the description of the cds contract.
        Optional. If null, the protection_leg and the premium_leg must be provided.
    cds_convention : CdsConvention or str, optional
        Define the cds convention. Optional. Defaults to 'ISDA'.
    trade_date : str or date or datetime or timedelta, optional
        The date the cds contract was created. Optional. By default the valuation date.
    step_in_date : str or date or datetime or timedelta, optional
        The effective protection date. Optional. By default the trade_date + 1 calendar.
    start_date : str or date or datetime or timedelta, optional
        The date the cds starts accruing interest. Its effective date. Optional. By
        default it is the accrued_begin_date (the last IMM date before trade_date) if
        cds_convention is ISDA, else it is the step_in_date.
    end_date : str or date or datetime or timedelta, optional
        The maturity date of the cds contract. Mandatory if instrument_code is null.
        Either the end_date or the tenor must be provided.
    tenor : str, optional
        The period code that represents the time between the start date and end date the
        contract. Mandatory if instrument_code is null. Either the end_date or the tenor
        must be provided.
    start_date_moving_convention : BusinessDayConvention or str, optional
        The method to adjust the start_date. Optional. By default 'NoMoving' is used.
    end_date_moving_convention : BusinessDayConvention, optional
        The method to adjust the end_date. Optional. By default 'NoMoving' is used.
    adjust_to_isda_end_date : bool, optional
        The way the end_date is adjusted if computed from tenor input. The possible
        values are:
        - true ( the end_date is an IMM date computed from start_date according to ISDA
        rules ),
        - false ( the end_date is computed from start_date according to
        end_dateMovingConvention). Optional. By default true is used if cds_convention
        is ISDA, else false is used.
    protection_leg : ProtectionLegDefinition, optional
        The Protection Leg of the CDS. It is the default leg. Mandatory if
        instrument_code is null. Optional if instrument_code not null.
    premium_leg : PremiumLegDefinition, optional
        The Premium Leg of the CDS. It is a swap leg paying a fixed coupon. Mandatory if
        instrument_code is null. Optional if instrument_code not null.
    accrued_begin_date : str or date or datetime or timedelta, optional
        The date from which the accrued is computed. Optional. By default the last
        cashflow date is used. (NOTE(review): upstream text was circular -- "the last
        cashflow date ... by default it is the last cashflow date"; presumed meaning,
        confirm against the IPA API reference.)
    """
    def __init__(
        self,
        instrument_tag: Optional[str] = None,
        instrument_code: Optional[str] = None,
        cds_convention: Union[CdsConvention, str] = None,
        trade_date: "OptDateTime" = None,
        step_in_date: "OptDateTime" = None,
        start_date: "OptDateTime" = None,
        end_date: "OptDateTime" = None,
        tenor: Optional[str] = None,
        start_date_moving_convention: Union[BusinessDayConvention, str] = None,
        end_date_moving_convention: Union[BusinessDayConvention, str] = None,
        adjust_to_isda_end_date: Optional[bool] = None,
        protection_leg: Optional[ProtectionLegDefinition] = None,
        premium_leg: Optional[PremiumLegDefinition] = None,
        accrued_begin_date: "OptDateTime" = None,
    ) -> None:
        super().__init__()
        # Each assignment routes through the matching property setter below,
        # which serializes the value under its camelCase payload key. The
        # assignment order therefore determines the payload key order.
        self.instrument_tag = instrument_tag
        self.instrument_code = instrument_code
        self.cds_convention = cds_convention
        self.trade_date = trade_date
        self.step_in_date = step_in_date
        self.start_date = start_date
        self.end_date = end_date
        self.tenor = tenor
        self.start_date_moving_convention = start_date_moving_convention
        self.end_date_moving_convention = end_date_moving_convention
        self.adjust_to_isda_end_date = adjust_to_isda_end_date
        self.protection_leg = protection_leg
        self.premium_leg = premium_leg
        self.accrued_begin_date = accrued_begin_date
    def get_instrument_type(self):
        """Return the instrument-type discriminator used by the analytics backend."""
        return "Cds"
    @property
    def cds_convention(self):
        """
        Define the cds convention. Possible values are:
        - 'ISDA' (start_date will default to accrued_begin_date, end_date will be
        adjusted to IMM Date),
        - 'UserDefined' (start_date will default to step_in_date, end_date will not be
        adjusted). Optional. Defaults to 'ISDA'.
        :return: enum CdsConvention
        """
        return self._get_enum_parameter(CdsConvention, "cdsConvention")
    @cds_convention.setter
    def cds_convention(self, value):
        self._set_enum_parameter(CdsConvention, "cdsConvention", value)
    @property
    def end_date_moving_convention(self):
        """
        The method to adjust the end_date. The possible values are:
        - ModifiedFollowing (adjusts dates according to the Modified Following
        convention - next business day unless is it goes into the next month,
        preceeding is used in that case),
        - NextBusinessDay (adjusts dates according to the Following convention - Next
        Business Day),
        - PreviousBusinessDay (adjusts dates according to the Preceeding convention -
        Previous Business Day),
        - NoMoving (does not adjust dates),
        - BbswModifiedFollowing (adjusts dates according to the BBSW Modified Following
        convention). Optional. By default 'NoMoving' is used.
        :return: enum BusinessDayConvention
        """
        return self._get_enum_parameter(BusinessDayConvention, "endDateMovingConvention")
    @end_date_moving_convention.setter
    def end_date_moving_convention(self, value):
        self._set_enum_parameter(BusinessDayConvention, "endDateMovingConvention", value)
    @property
    def premium_leg(self):
        """
        The Premium Leg of the CDS. It is a swap leg paying a fixed coupon. Mandatory if
        instrument_code is null. Optional if instrument_code not null.
        :return: object PremiumLegDefinition
        """
        return self._get_object_parameter(PremiumLegDefinition, "premiumLeg")
    @premium_leg.setter
    def premium_leg(self, value):
        self._set_object_parameter(PremiumLegDefinition, "premiumLeg", value)
    @property
    def protection_leg(self):
        """
        The Protection Leg of the CDS. It is the default leg. Mandatory if
        instrument_code is null. Optional if instrument_code not null.
        :return: object ProtectionLegDefinition
        """
        return self._get_object_parameter(ProtectionLegDefinition, "protectionLeg")
    @protection_leg.setter
    def protection_leg(self, value):
        self._set_object_parameter(ProtectionLegDefinition, "protectionLeg", value)
    @property
    def start_date_moving_convention(self):
        """
        The method to adjust the start_date. The possible values are:
        - ModifiedFollowing (adjusts dates according to the Modified Following
        convention - next business day unless is it goes into the next month,
        preceeding is used in that case),
        - NextBusinessDay (adjusts dates according to the Following convention - Next
        Business Day),
        - PreviousBusinessDay (adjusts dates according to the Preceeding convention -
        Previous Business Day),
        - NoMoving (does not adjust dates),
        - BbswModifiedFollowing (adjusts dates according to the BBSW Modified Following
        convention). Optional. By default 'NoMoving' is used.
        :return: enum BusinessDayConvention
        """
        return self._get_enum_parameter(BusinessDayConvention, "startDateMovingConvention")
    @start_date_moving_convention.setter
    def start_date_moving_convention(self, value):
        self._set_enum_parameter(BusinessDayConvention, "startDateMovingConvention", value)
    @property
    def accrued_begin_date(self):
        """
        The date from which the accrued is computed. Optional. By default the last
        cashflow date is used. (NOTE(review): upstream text was circular; presumed
        meaning, confirm against the IPA API reference.)
        :return: str
        """
        return self._get_parameter("accruedBeginDate")
    @accrued_begin_date.setter
    def accrued_begin_date(self, value):
        self._set_datetime_parameter("accruedBeginDate", value)
    @property
    def adjust_to_isda_end_date(self):
        """
        The way the end_date is adjusted if computed from tenor input. The possible
        values are:
        - true ( the end_date is an IMM date computed from start_date according to ISDA
        rules ),
        - false ( the end_date is computed from start_date according to
        end_dateMovingConvention).
        Optional. By default true is used if cds_convention is ISDA, else false is used.
        :return: bool
        """
        return self._get_parameter("adjustToIsdaEndDate")
    @adjust_to_isda_end_date.setter
    def adjust_to_isda_end_date(self, value):
        self._set_parameter("adjustToIsdaEndDate", value)
    @property
    def end_date(self):
        """
        The maturity date of the cds contract. Mandatory if instrument_code is null.
        Either the end_date or the tenor must be provided.
        :return: str
        """
        return self._get_parameter("endDate")
    @end_date.setter
    def end_date(self, value):
        self._set_datetime_parameter("endDate", value)
    @property
    def instrument_code(self):
        """
        A cds RIC that is used to retrieve the description of the cds contract.
        Optional. If null, the protection_leg and the premium_leg must be provided.
        :return: str
        """
        return self._get_parameter("instrumentCode")
    @instrument_code.setter
    def instrument_code(self, value):
        self._set_parameter("instrumentCode", value)
    @property
    def start_date(self):
        """
        The date the cds starts accruing interest. Its effective date. Optional. By
        default it is the accrued_begin_date (the last IMM date before trade_date) if
        cds_convention is ISDA, else it is the step_in_date.
        :return: str
        """
        return self._get_parameter("startDate")
    @start_date.setter
    def start_date(self, value):
        self._set_datetime_parameter("startDate", value)
    @property
    def step_in_date(self):
        """
        The effective protection date. Optional. By default the trade_date + 1 calendar.
        :return: str
        """
        return self._get_parameter("stepInDate")
    @step_in_date.setter
    def step_in_date(self, value):
        # NOTE(review): uses _set_date_parameter (date-only serialization) while
        # start/end/accrued-begin dates use _set_datetime_parameter -- presumed
        # intentional; confirm against the backend contract.
        self._set_date_parameter("stepInDate", value)
    @property
    def tenor(self):
        """
        The period code that represents the time between the start date and end date the
        contract. Mandatory if instrument_code is null. Either the end_date or the tenor
        must be provided.
        :return: str
        """
        return self._get_parameter("tenor")
    @tenor.setter
    def tenor(self, value):
        self._set_parameter("tenor", value)
    @property
    def trade_date(self):
        """
        The date the cds contract was created. Optional. By default the valuation date.
        :return: str
        """
        return self._get_parameter("tradeDate")
    @trade_date.setter
    def trade_date(self, value):
        # NOTE(review): date-only serialization, like step_in_date -- confirm.
        self._set_date_parameter("tradeDate", value)
from typing import Optional, Union
from ....._types import OptDateTime
from .._instrument_definition import ObjectDefinition
from ..._enums import (
BusinessDayConvention,
DayCountBasis,
Direction,
Frequency,
StubRule,
)
class PremiumLegDefinition(ObjectDefinition):
    """
    Premium leg of a CDS: the swap leg paying a fixed coupon. Part of the
    Financial Contract analytics API request, which returns calculations
    relevant to each contract type.

    Each snake_case attribute is serialized under a camelCase key in the
    request payload via the property setters below.

    Parameters
    ----------
    direction : Direction or str, optional
        The direction of the leg.
    notional_ccy : str, optional
        The ISO code of the notional currency. Mandatory if instrument code or
        instrument style has not been defined. In case an instrument code/style has been
        defined, value may comes from the reference data.
    notional_amount : float, optional
        The notional amount of the leg at the period start date. Optional. By default
        1,000,000 is used.
    fixed_rate_percent : float, optional
        The fixed coupon rate in percentage. It is mandatory in case of a single leg
        instrument. Otherwise, in case of multi leg instrument, it can be computed as
        the Par rate.
    interest_payment_frequency : Frequency or str, optional
        The frequency of the interest payments. Optional if an instrument code/style
        have been defined : in that case, value comes from reference data. Otherwise, it
        is mandatory.
    interest_calculation_method : DayCountBasis or str, optional
        The Day Count Basis method used to calculate the coupon interest payments.
        Mandatory.
    accrued_calculation_method : DayCountBasis or str, optional
        The Day Count Basis method used to calculate the accrued interest payments.
        Optional. By default, the same value than interest_calculation_method is used.
    payment_business_day_convention : BusinessDayConvention or str, optional
        The method to adjust dates to a working day. Optional.
        In case an instrument code/style has been defined, value comes from
        bond reference data. Otherwise 'ModifiedFollowing' is used.
    first_regular_payment_date : str or date or datetime or timedelta, optional
        The first regular coupon payment date for leg with an odd first coupon.
        Optional.
    last_regular_payment_date : str or date or datetime or timedelta, optional
        The last regular coupon payment date for leg with an odd last coupon. Optional.
    payment_business_days : str, optional
        A list of coma-separated calendar codes to adjust dates (e.g. 'EMU' or 'USA').
        Optional. By default the calendar associated to notional_ccy is used.
    stub_rule : StubRule or str, optional
        The rule that defines whether coupon roll dates are aligned on the maturity or
        the issue date. Optional. By default 'Maturity' is used.
    accrued_paid_on_default : bool, optional
        Specifies whether the accrued is paid at the credit event date or not.
        - true : the accrued is paid at the credit event date
        - false : the accrued is not paid at the credit event date Optional. Defaults
        to false.
    interest_payment_ccy : str, optional
        The ISO code of the interest payment currency. Mandatory.
    """
    def __init__(
        self,
        *,
        direction: Union[Direction, str] = None,
        notional_ccy: Optional[str] = None,
        notional_amount: Optional[float] = None,
        fixed_rate_percent: Optional[float] = None,
        interest_payment_frequency: Union[Frequency, str] = None,
        interest_calculation_method: Union[DayCountBasis, str] = None,
        accrued_calculation_method: Union[DayCountBasis, str] = None,
        payment_business_day_convention: Union[BusinessDayConvention, str] = None,
        first_regular_payment_date: "OptDateTime" = None,
        last_regular_payment_date: "OptDateTime" = None,
        payment_business_days: Optional[str] = None,
        stub_rule: Union[StubRule, str] = None,
        accrued_paid_on_default: Optional[bool] = None,
        interest_payment_ccy: Optional[str] = None,
    ) -> None:
        super().__init__()
        # Each assignment routes through the matching property setter below,
        # which serializes the value under its camelCase payload key.
        self.direction = direction
        self.notional_ccy = notional_ccy
        self.notional_amount = notional_amount
        self.fixed_rate_percent = fixed_rate_percent
        self.interest_payment_frequency = interest_payment_frequency
        self.interest_calculation_method = interest_calculation_method
        self.accrued_calculation_method = accrued_calculation_method
        self.payment_business_day_convention = payment_business_day_convention
        self.first_regular_payment_date = first_regular_payment_date
        self.last_regular_payment_date = last_regular_payment_date
        self.payment_business_days = payment_business_days
        self.stub_rule = stub_rule
        self.accrued_paid_on_default = accrued_paid_on_default
        self.interest_payment_ccy = interest_payment_ccy
    @property
    def accrued_calculation_method(self):
        """
        The Day Count Basis method used to calculate the accrued interest payments.
        Optional. By default, the same value than interest_calculation_method is used.
        :return: enum DayCountBasis
        """
        return self._get_enum_parameter(DayCountBasis, "accruedCalculationMethod")
    @accrued_calculation_method.setter
    def accrued_calculation_method(self, value):
        self._set_enum_parameter(DayCountBasis, "accruedCalculationMethod", value)
    @property
    def direction(self):
        """
        The direction of the leg. the possible values are:
        - 'Paid' (the cash flows of the leg are paid to the counterparty),
        - 'Received' (the cash flows of the leg are received from the counterparty).
        Optional for a single leg instrument (like a bond), in that case default value
        is Received. It is mandatory for a multi-instrument leg instrument (like Swap
        or CDS leg).
        :return: enum Direction
        """
        return self._get_enum_parameter(Direction, "direction")
    @direction.setter
    def direction(self, value):
        self._set_enum_parameter(Direction, "direction", value)
    @property
    def interest_calculation_method(self):
        """
        The Day Count Basis method used to calculate the coupon interest payments.
        Mandatory.
        :return: enum DayCountBasis
        """
        return self._get_enum_parameter(DayCountBasis, "interestCalculationMethod")
    @interest_calculation_method.setter
    def interest_calculation_method(self, value):
        self._set_enum_parameter(DayCountBasis, "interestCalculationMethod", value)
    @property
    def interest_payment_frequency(self):
        """
        The frequency of the interest payments. Optional if an instrument code/style
        have been defined : in that case, value comes from reference data. Otherwise, it
        is mandatory.
        :return: enum Frequency
        """
        return self._get_enum_parameter(Frequency, "interestPaymentFrequency")
    @interest_payment_frequency.setter
    def interest_payment_frequency(self, value):
        self._set_enum_parameter(Frequency, "interestPaymentFrequency", value)
    @property
    def payment_business_day_convention(self):
        """
        The method to adjust dates to a working day. The possible values are:
        - ModifiedFollowing (adjusts dates according to the Modified Following
        convention - next business day unless is it goes into the next month,
        preceeding is used in that case),
        - NextBusinessDay (adjusts dates according to the Following convention - Next
        Business Day),
        - PreviousBusinessDay (adjusts dates according to the Preceeding convention -
        Previous Business Day),
        - NoMoving (does not adjust dates),
        - BbswModifiedFollowing (adjusts dates according to the BBSW Modified Following
        convention). Optional. In case an instrument code/style has been defined,
        value comes from bond reference data. Otherwise 'ModifiedFollowing' is used.
        :return: enum BusinessDayConvention
        """
        return self._get_enum_parameter(BusinessDayConvention, "paymentBusinessDayConvention")
    @payment_business_day_convention.setter
    def payment_business_day_convention(self, value):
        self._set_enum_parameter(BusinessDayConvention, "paymentBusinessDayConvention", value)
    @property
    def stub_rule(self):
        """
        The rule that defines whether coupon roll dates are aligned on the maturity or
        the issue date. The possible values are:
        - ShortFirstProRata (to create a short period between the start date and the
        first coupon date, and pay a smaller amount of interest for the short
        period.All coupon dates are calculated backward from the maturity date),
        - ShortFirstFull (to create a short period between the start date and the first
        coupon date, and pay a regular coupon on the first coupon date. All coupon
        dates are calculated backward from the maturity date),
        - LongFirstFull (to create a long period between the start date and the second
        coupon date, and pay a regular coupon on the second coupon date. All coupon
        dates are calculated backward from the maturity date),
        - ShortLastProRata (to create a short period between the last payment date and
        maturity, and pay a smaller amount of interest for the short period. All
        coupon dates are calculated forward from the start date). This property may
        also be used in conjunction with first_regular_payment_date and
        last_regular_payment_date; in that case the following values can be defined:
        - Issue (all dates are aligned on the issue date),
        - Maturity (all dates are aligned on the maturity date). Optional. By default
        'Maturity' is used.
        :return: enum StubRule
        """
        return self._get_enum_parameter(StubRule, "stubRule")
    @stub_rule.setter
    def stub_rule(self, value):
        self._set_enum_parameter(StubRule, "stubRule", value)
    @property
    def accrued_paid_on_default(self):
        """
        Specifies whether the accrued is paid at the credit event date or not.
        - true : the accrued is paid at the credit event date
        - false : the accrued is not paid at the credit event date Optional. Defaults
        to false.
        :return: bool
        """
        return self._get_parameter("accruedPaidOnDefault")
    @accrued_paid_on_default.setter
    def accrued_paid_on_default(self, value):
        self._set_parameter("accruedPaidOnDefault", value)
    @property
    def first_regular_payment_date(self):
        """
        The first regular coupon payment date for leg with an odd first coupon.
        Optional.
        :return: str
        """
        return self._get_parameter("firstRegularPaymentDate")
    @first_regular_payment_date.setter
    def first_regular_payment_date(self, value):
        self._set_datetime_parameter("firstRegularPaymentDate", value)
    @property
    def fixed_rate_percent(self):
        """
        The fixed coupon rate in percentage. It is mandatory in case of a single leg
        instrument. Otherwise, in case of multi leg instrument, it can be computed as
        the Par rate.
        :return: float
        """
        return self._get_parameter("fixedRatePercent")
    @fixed_rate_percent.setter
    def fixed_rate_percent(self, value):
        self._set_parameter("fixedRatePercent", value)
    @property
    def interest_payment_ccy(self):
        """
        The ISO code of the interest payment currency. Mandatory.
        :return: str
        """
        return self._get_parameter("interestPaymentCcy")
    @interest_payment_ccy.setter
    def interest_payment_ccy(self, value):
        self._set_parameter("interestPaymentCcy", value)
    @property
    def last_regular_payment_date(self):
        """
        The last regular coupon payment date for leg with an odd last coupon. Optional.
        :return: str
        """
        return self._get_parameter("lastRegularPaymentDate")
    @last_regular_payment_date.setter
    def last_regular_payment_date(self, value):
        self._set_datetime_parameter("lastRegularPaymentDate", value)
    @property
    def notional_amount(self):
        """
        The notional amount of the leg at the period start date. Optional. By default
        1,000,000 is used.
        :return: float
        """
        return self._get_parameter("notionalAmount")
    @notional_amount.setter
    def notional_amount(self, value):
        self._set_parameter("notionalAmount", value)
    @property
    def notional_ccy(self):
        """
        The ISO code of the notional currency. Mandatory if instrument code or
        instrument style has not been defined. In case an instrument code/style has been
        defined, value may comes from the reference data.
        :return: str
        """
        return self._get_parameter("notionalCcy")
    @notional_ccy.setter
    def notional_ccy(self, value):
        self._set_parameter("notionalCcy", value)
    @property
    def payment_business_days(self):
        """
        A list of coma-separated calendar codes to adjust dates (e.g. 'EMU' or 'USA').
        Optional. By default the calendar associated to notional_ccy is used.
        :return: str
        """
        return self._get_parameter("paymentBusinessDays")
    @payment_business_days.setter
    def payment_business_days(self, value):
        self._set_parameter("paymentBusinessDays", value)
from typing import Optional, TYPE_CHECKING
from . import PricingParameters
from ._cds_definition import CdsInstrumentDefinition
from ..._enums import (
BusinessDayConvention,
CdsConvention,
)
from ._premium_leg_definition import PremiumLegDefinition
from ._protection_leg_definition import ProtectionLegDefinition
from .._base_definition import BaseDefinition
from ....._tools import try_copy_to_list
if TYPE_CHECKING:
from ....._types import ExtendedParams, OptStrStrs, OptDateTime
class Definition(BaseDefinition):
    """
    CDS contract definition for the Financial Contract analytics API endpoint,
    which returns calculations relevant to each contract type.

    This is a thin facade: it assembles a ``CdsInstrumentDefinition`` from the
    keyword arguments and hands it to ``BaseDefinition`` together with the
    requested fields, pricing parameters and extended parameters.

    Parameters
    ----------
    instrument_tag : str, optional
        User defined string to identify the instrument. It can be used to link output
        results to the instrument definition. Only alphabetic, numeric and '- _.#=@'
        characters are supported. Optional.
    instrument_code : str, optional
        A cds RIC that is used to retrieve the description of the cds contract.
        Optional. If null, the protection_leg and the premium_leg must be provided.
    cds_convention : CdsConvention, optional
        Define the cds convention. Optional. Defaults to 'ISDA'.
    trade_date : str or date, optional
        The date the cds contract was created. Optional. By default the valuation date.
    step_in_date : str or date, optional
        The effective protection date. Optional. By default the trade_date + 1 calendar.
    start_date : str or date or datetime or timedelta, optional
        The date the cds starts accruing interest. Its effective date. Optional. By
        default it is the accrued_begin_date (the last IMM date before trade_date) if
        cds_convention is ISDA, else it is the step_in_date.
    end_date : str or date or datetime or timedelta, optional
        The maturity date of the cds contract. Mandatory if instrument_code is null.
        Either the end_date or the tenor must be provided.
    tenor : str, optional
        The period code that represents the time between the start date and end date the
        contract. Mandatory if instrument_code is null. Either the end_date or the tenor
        must be provided.
    start_date_moving_convention : BusinessDayConvention, optional
        The method to adjust the start_date. Optional. By default 'NoMoving' is used.
    end_date_moving_convention : BusinessDayConvention, optional
        The method to adjust the end_date. Optional. By default 'NoMoving' is used.
    adjust_to_isda_end_date : bool, optional
        The way the end_date is adjusted if computed from tenor input. Optional.
        By default true is used if cds_convention is ISDA, else false is used.
    protection_leg : ProtectionLegDefinition, optional
        The Protection Leg of the CDS. It is the default leg. Mandatory if
        instrument_code is null. Optional if instrument_code not null.
    premium_leg : PremiumLegDefinition, optional
        The Premium Leg of the CDS. It is a swap leg paying a fixed coupon. Mandatory if
        instrument_code is null. Optional if instrument_code not null.
    accrued_begin_date : str, optional
        The last cashflow date. Optional. By default it is the last cashflow date.
    fields: list of str, optional
        Contains the list of Analytics that the quantitative analytic service will
        compute.
    pricing_parameters : PricingParameters, optional
        The pricing parameters to apply to this instrument. Optional. If pricing
        parameters are not provided at this level parameters defined globally at the
        request level are used. If no pricing parameters are provided globally default
        values apply.
    extended_params : dict, optional
        If necessary other parameters
    Methods
    -------
    get_data(session=session, on_response=on_response)
        Returns a response to the data platform
    get_stream(session=session)
        Get stream object of this definition
    Examples
    --------
    >>> import refinitiv.data.content.ipa.financial_contracts as rdf
    >>> definition = rdf.cds.Definition(
    ...     instrument_tag="Cds1_InstrumentCode",
    ...     instrument_code="BNPP5YEUAM=R",
    ...     cds_convention=rdf.cds.CdsConvention.ISDA,
    ...     end_date_moving_convention=rdf.cds.BusinessDayConvention.NO_MOVING,
    ...     adjust_to_isda_end_date=True,
    ...     pricing_parameters=rdf.cds.PricingParameters(
    ...         market_data_date="2020-01-01"
    ...     ),
    ...     fields=[
    ...         "InstrumentTag",
    ...         "ValuationDate",
    ...         "InstrumentDescription",
    ...         "StartDate",
    ...         "EndDate",
    ...         "SettlementDate",
    ...         "UpfrontAmountInDealCcy",
    ...         "CashAmountInDealCcy",
    ...         "AccruedAmountInDealCcy",
    ...         "AccruedBeginDate",
    ...         "NextCouponDate",
    ...         "UpfrontPercent",
    ...         "ConventionalSpreadBp",
    ...         "ParSpreadBp",
    ...         "AccruedDays",
    ...         "ErrorCode",
    ...         "ErrorMessage",
    ...     ],
    ... )
    >>> response = definition.get_data()
    >>> df = response.data.df
    """
    def __init__(
        self,
        instrument_tag: Optional[str] = None,
        instrument_code: Optional[str] = None,
        cds_convention: Optional[CdsConvention] = None,
        trade_date: "OptDateTime" = None,
        step_in_date: "OptDateTime" = None,
        start_date: "OptDateTime" = None,
        end_date: "OptDateTime" = None,
        tenor: Optional[str] = None,
        start_date_moving_convention: Optional[BusinessDayConvention] = None,
        end_date_moving_convention: Optional[BusinessDayConvention] = None,
        adjust_to_isda_end_date: Optional[bool] = None,
        protection_leg: Optional[ProtectionLegDefinition] = None,
        premium_leg: Optional[PremiumLegDefinition] = None,
        accrued_begin_date: "OptDateTime" = None,
        fields: "OptStrStrs" = None,
        pricing_parameters: Optional[PricingParameters] = None,
        extended_params: "ExtendedParams" = None,
    ):
        # Defensive copy so a caller-owned list is not mutated downstream.
        fields = try_copy_to_list(fields)
        # All arguments are forwarded by keyword, so the ordering difference
        # from the signature is cosmetic only.
        definition = CdsInstrumentDefinition(
            cds_convention=cds_convention,
            end_date_moving_convention=end_date_moving_convention,
            premium_leg=premium_leg,
            protection_leg=protection_leg,
            start_date_moving_convention=start_date_moving_convention,
            accrued_begin_date=accrued_begin_date,
            adjust_to_isda_end_date=adjust_to_isda_end_date,
            end_date=end_date,
            instrument_code=instrument_code,
            instrument_tag=instrument_tag,
            start_date=start_date,
            step_in_date=step_in_date,
            tenor=tenor,
            trade_date=trade_date,
        )
        super().__init__(
            definition=definition,
            fields=fields,
            pricing_parameters=pricing_parameters,
            extended_params=extended_params,
        )
from typing import Optional
from ....._types import OptDateTime
from ..._object_definition import ObjectDefinition
class PricingParameters(ObjectDefinition):
"""
API endpoint for Financial Contract analytics,
that returns calculations relevant to each contract type.
Parameters
----------
cash_amount_in_deal_ccy : float, optional
cash_amount_in_deal_ccy to override and that will be used as pricing analysis
input to compute the cds other outputs. Optional. No override is applied by
default. Note that only one pricing analysis input should be defined.
clean_price_percent : float, optional
clean_price_percent to override and that will be used as pricing analysis input
to compute the cds other outputs. Optional. No override is applied by default.
Note that only one pricing analysis input should be defined.
conventional_spread_bp : float, optional
conventional_spread_bp to override and that will be used as pricing analysis
input to compute the cds other outputs. Optional. No override is applied by
default. Note that only one pricing analysis input should be defined.
market_data_date : str or date or datetime or timedelta, optional
The market data date for pricing. Optional. By default, the market_data_date
date is the valuation_date or Today
report_ccy : str, optional
The reporting currency code, expressed in iso 4217 alphabetical format (e.g.,
'usd'). it is set for the fields ending with 'xxxinreportccy'. optional. the
default value is the notional currency.
upfront_amount_in_deal_ccy : float, optional
upfront_amount_in_deal_ccy to override and that will be used as pricing analysis
input to compute the cds other outputs. Optional. No override is applied by
default. Note that only one pricing analysis input should be defined.
upfront_percent : float, optional
upfront_percent to override and that will be used as pricing analysis input to
compute the cds other outputs. Optional. No override is applied by default. Note
that only one pricing analysis input should be defined.
valuation_date : str or date or datetime or timedelta, optional
The valuation date for pricing. Optional. If not set the valuation date is
equal to market_data_date or Today. For assets that contains a
settlementConvention, the default valuation date is equal to the settlementdate
of the Asset that is usually the TradeDate+SettlementConvention.
"""
def __init__(
self,
cash_amount_in_deal_ccy: Optional[float] = None,
clean_price_percent: Optional[float] = None,
conventional_spread_bp: Optional[float] = None,
market_data_date: "OptDateTime" = None,
report_ccy: Optional[str] = None,
upfront_amount_in_deal_ccy: Optional[float] = None,
upfront_percent: Optional[float] = None,
valuation_date: "OptDateTime" = None,
) -> None:
super().__init__()
self.cash_amount_in_deal_ccy = cash_amount_in_deal_ccy
self.clean_price_percent = clean_price_percent
self.conventional_spread_bp = conventional_spread_bp
self.market_data_date = market_data_date
self.report_ccy = report_ccy
self.upfront_amount_in_deal_ccy = upfront_amount_in_deal_ccy
self.upfront_percent = upfront_percent
self.valuation_date = valuation_date
@property
def cash_amount_in_deal_ccy(self):
"""
cash_amount_in_deal_ccy to override and that will be used as pricing analysis
input to compute the cds other outputs. Optional. No override is applied by
default. Note that only one pricing analysis input should be defined.
:return: float
"""
return self._get_parameter("cashAmountInDealCcy")
@cash_amount_in_deal_ccy.setter
def cash_amount_in_deal_ccy(self, value):
self._set_parameter("cashAmountInDealCcy", value)
@property
def clean_price_percent(self):
"""
clean_price_percent to override and that will be used as pricing analysis input
to compute the cds other outputs. Optional. No override is applied by default.
Note that only one pricing analysis input should be defined.
:return: float
"""
return self._get_parameter("cleanPricePercent")
@clean_price_percent.setter
def clean_price_percent(self, value):
self._set_parameter("cleanPricePercent", value)
@property
def conventional_spread_bp(self):
"""
conventional_spread_bp to override and that will be used as pricing analysis
input to compute the cds other outputs. Optional. No override is applied by
default. Note that only one pricing analysis input should be defined.
:return: float
"""
return self._get_parameter("conventionalSpreadBp")
@conventional_spread_bp.setter
def conventional_spread_bp(self, value):
self._set_parameter("conventionalSpreadBp", value)
@property
def market_data_date(self):
"""
The market data date for pricing. Optional. By default, the market_data_date
date is the valuation_date or Today
:return: str
"""
return self._get_parameter("marketDataDate")
@market_data_date.setter
def market_data_date(self, value):
self._set_datetime_parameter("marketDataDate", value)
@property
def report_ccy(self):
"""
The reporting currency code, expressed in iso 4217 alphabetical format (e.g.,
'usd'). It is set for the fields ending with 'xxxinreportccy'. Optional. The
default value is the notional currency.
:return: str
"""
return self._get_parameter("reportCcy")
@report_ccy.setter
def report_ccy(self, value):
self._set_parameter("reportCcy", value)
@property
def upfront_amount_in_deal_ccy(self):
"""
upfront_amount_in_deal_ccy to override and that will be used as pricing analysis
input to compute the cds other outputs. Optional. No override is applied by
default. Note that only one pricing analysis input should be defined.
:return: float
"""
return self._get_parameter("upfrontAmountInDealCcy")
@upfront_amount_in_deal_ccy.setter
def upfront_amount_in_deal_ccy(self, value):
self._set_parameter("upfrontAmountInDealCcy", value)
@property
def upfront_percent(self):
"""
upfront_percent to override and that will be used as pricing analysis input to
compute the cds other outputs. Optional. No override is applied by default. Note
that only one pricing analysis input should be defined.
:return: float
"""
return self._get_parameter("upfrontPercent")
@upfront_percent.setter
def upfront_percent(self, value):
self._set_parameter("upfrontPercent", value)
@property
def valuation_date(self):
"""
The valuation date for pricing. Optional. If not set the valuation date is
equal to market_data_date or Today. For assets that contains a
settlementConvention, the default valuation date is equal to the settlementdate
of the Asset that is usually the TradeDate+SettlementConvention.
:return: str
"""
return self._get_parameter("valuationDate")
@valuation_date.setter
def valuation_date(self, value):
self._set_datetime_parameter("valuationDate", value) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ipa/financial_contracts/cds/_cds_pricing_parameters.py | 0.956319 | 0.381018 | _cds_pricing_parameters.py | pypi |
from typing import Optional, Union
from ..._enums import (
DividendType,
ProjectedIndexCalculationMethod,
CreditSpreadType,
PriceSide,
RedemptionDateType,
VolatilityType,
VolatilityTermStructureType,
BenchmarkYieldSelectionMode,
YieldType,
QuoteFallbackLogic,
InflationMode,
)
from ..._models import BondRoundingParameters
from ..._object_definition import ObjectDefinition
from ....._types import OptDateTime
class PricingParameters(ObjectDefinition):
"""
API endpoint for Financial Contract analytics,
that returns calculations relevant to each contract type.
Parameters
----------
trade_date : str or date or datetime or timedelta, optional
Trade date of the bond. The trade_date is used to compute the default
valuation_date : By default the rule is that valuation_date = trade_date +
settlement_convention. Optional. By default, it is equal to market_data_date.
benchmark_yield_selection_mode : BenchmarkYieldSelectionMode or str, optional
The benchmark yield.
Default value is "Interpolate".
credit_spread_type : CreditSpreadType or str, optional
Credit curve spread type to use during pricing. Applicable for Convertible
Bonds.
dividend_type : DividendType or str, optional
Underlying stock dividend type used during pricing convertible bond. Applicable
for Convertible Bonds.
fx_price_side : PriceSide or str, optional
FX price side to consider when retrieving FX rates (Mid, Bid, Ask, Last, Close)
inflation_mode : InflationMode or str, optional
The indicator used to define whether instrument parameters should be adjusted
from inflation or not. Available only for inflation-linked instruments.
optional. By default, 'default' is used. That means it depends on the instrument
quotation convention.
price_side : PriceSide or str, optional
Quoted price side of the bond to use for pricing Analysis: Bid(Bid value),
Ask(Ask value), Mid(Mid value) Optional. By default the "Mid" price of the bond
is used.
projected_index_calculation_method : ProjectedIndexCalculationMethod or str, optional
Flag used to define how projected index is computed.
Default value is "ConstantIndex". It is defaulted to "ForwardIndex"
for Preferreds and Brazilian Debenture bonds.
quote_fallback_logic : QuoteFallbackLogic or str, optional
Enumeration used to define the fallback logic for the quotation of the
instrument.
redemption_date_type : RedemptionDateType or str, optional
Redemption type of the bond. It is used to compute the default redemption date.
Default value is "RedemptionAtWorstDate" for callable bond,
"RedemptionAtBestDate" for puttable bond or "RedemptionAtMaturityDate".
rounding_parameters : BondRoundingParameters, optional
Definition of rounding parameters to be applied on accrued, price or yield.
By default, rounding parameters are the ones defined in the bond structure.
volatility_term_structure_type : VolatilityTermStructureType or str, optional
        Stock volatility term structure type to use during pricing. Applicable for
Convertible Bonds.
volatility_type : VolatilityType or str, optional
Volatility type to use during pricing. Applicable for Convertible Bonds.
yield_type : YieldType or str, optional
yield_type that specifies the rate structure.
The default value is Native.
adjusted_clean_price : float, optional
Inflation Adjusted Clean price to override and that will be used as pricing
analysis input. The currency of the clean price is the cash flow currency (that
can be different to deal currency especially if "ComputeCashFlowWithReportCcy"
flag has been set to true). No override is applied by default. Note that only
one pricing analysis input should be defined.
adjusted_dirty_price : float, optional
Inflation Adjusted Dirty price to override and that will be used as pricing
analysis input. The currency of the dirty price is the cash flow currency (that
can be different to deal currency especially if "ComputeCashFlowWithReportCcy"
flag has been set to true). No override is applied by default. Note that only
one pricing analysis input should be defined.
adjusted_yield_percent : float, optional
Inflation Adjusted Yield (expressed in percent) to override and that will be
used as pricing analysis input. No override is applied by default.
Note that only one pricing analysis input should be defined.
apply_tax_to_full_pricing : bool, optional
Tax Parameters Flag to set these tax parameters for all
pricing/schedule/risk/spread.
By default Tax Params are applied only to Muni.
asset_swap_spread_bp : float, optional
AssetSwapSpread to override and that will be used as pricing analysis input to
compute the bond price. No override is applied by default. Note that
        only one pricing analysis input should be defined.
benchmark_at_issue_price : float, optional
Price of benchmark at issue to override and that will be used to compute
benchmark at redemption spread. No override is applied by default and
price is computed or retrieved from market data.
benchmark_at_issue_ric : str, optional
Ric of benchmark at issue to override and that will be used as pricing analysis
input to compute the bond price. Optional. No override is applied by default.
Note that only one pricing analysis input should be defined.
benchmark_at_issue_spread_bp : float, optional
Spread of benchmark at issue to override and that will be used as pricing
analysis input to compute the bond price. No override is applied by
default. Note that only one pricing analysis input should be defined.
benchmark_at_issue_yield_percent : float, optional
Yield of benchmark at issue to override and that will be used to compute
benchmark at redemption spread. No override is applied by default and
yield is computed or retrieved from market data.
benchmark_at_redemption_price : float, optional
Price of benchmark at redemption to override and that will be used to compute
benchmark at redemption spread. No override is applied by default and
price is computed or retrieved from market data.
benchmark_at_redemption_spread_bp : float, optional
Spread of benchmark at redemption to override and that will be used as pricing
analysis input to compute the bond price. No override is applied by
default. Note that only one pricing analysis input should be defined.
benchmark_at_redemption_yield_percent : float, optional
Yield of benchmark at redemption to override and that will be used to compute
benchmark at redemption spread. No override is applied by default and
yield is computed or retrieved from market data.
bond_recovery_rate_percent : float, optional
Bond Recovery Rate Percent set for convertible bond. Applicable for Convertible
Bonds.
cash_amount : float, optional
Cash amount to override and that will be used as pricing analysis input.
No override is applied by default. Note that only one pricing analysis
input should be defined.
cds_recovery_rate_percent : float, optional
Recovery rate percent used in credit curve related to convertible. Applicable
for Convertible Bonds.
clean_price : float, optional
Clean price to override and that will be used as pricing analysis input. The
currency of the clean price is the cash flow currency (that can be different to
deal currency especially if "ComputeCashFlowWithReportCcy" flag has been set to
true). No override is applied by default. Note that only one pricing analysis
input should be defined.
compute_cash_flow_from_issue_date : bool, optional
The indicator defines the date, from which the cash flows will be computed. The
possible values are:
- true: from issuedate,
- false: from tradedate. optional. default value is 'false'.
compute_cash_flow_with_report_ccy : bool, optional
The indicator used to express the instrument cash flows in the report currency.
The possible values are:
- true: the pricing will be done in the reporting currency using a fx forward
curve,
- false: the pricing will be done using notional currency. Optional. Default
value is 'false'.
concession_fee : float, optional
Fee to apply to the bond price; It is expressed in the same unit that the bond
price (percent or cash).
current_yield_percent : float, optional
Current Yield (expressed in percent) to override and that will be used as
pricing analysis input. No override is applied by default. Note that
        only one pricing analysis input should be defined.
dirty_price : float, optional
Dirty price to override and that will be used as pricing analysis input. The
currency of the dirty price is the cash flow currency (that can be different to
deal currency especially if "ComputeCashFlowWithReportCcy" flag has been set to
true). No override is applied by default. Note that only one pricing analysis
input should be defined.
discount_margin_bp : float, optional
Discount Margin basis points to override and that will be used as pricing
analysis input. Available only for Floating Rate Notes. No override is
        applied by default. Note that only one pricing analysis input should be defined.
discount_percent : float, optional
Discount (expressed in percent) to override and that will be used as pricing
analysis input. Should be used only for bond quoted in discount. Optional. No
        override is applied by default. Note that only one pricing analysis input should
be defined.
dividend_yield_percent : float, optional
Underlying Stock dividend yield percent. Applicable for Convertible Bonds.
edsf_benchmark_curve_yield_percent : float, optional
Yield of Euro-Dollar future benchmark curve (Edsf) to override and that will be
used to compute Euro-Dollar (Edsf) spread. No override is applied by
default and yield is computed or retrieved from market data.
edsf_spread_bp : float, optional
Spread of Euro-Dollar future benchmark curve (Edsf) to override and that will be
used as pricing analysis input to compute the bond price. This spread is
computed for USD Bond whose maturity is under 2 Years. No override is
        applied by default. Note that only one pricing analysis input should be defined.
efp_benchmark_price : float, optional
Price of EFP benchmark to override and that will be used to compute benchmark at
redemption spread in case the bond is an australian FRN. No override
is applied by default and price is computed or retrieved from market data.
efp_benchmark_ric : str, optional
RIC of EFP benchmark to override and that will be used as pricing analysis input
to compute the bond price in case the bond is an australian FRN. Ric can be
only "YTTc1" or "YTCc1".
Default value is "YTTc1".
efp_benchmark_yield_percent : float, optional
Yield of EFP benchmark to override and that will be used to compute benchmark at
redemption spread in case the bond is an australian FRN. No override
is applied by default and yield is computed or retrieved from market data.
efp_spread_bp : float, optional
Spread of EFP benchmark to override and that will be used as pricing analysis
input to compute the bond price in case the bond is an australian FRN.
No override is applied by default. Note that only one pricing analysis input
should be defined.
flat_credit_spread_bp : float, optional
Flat credit spread applied during pricing in basis points. Applicable when
SpreadType = FlatSpread. Applicable for Convertible Bonds.
flat_credit_spread_tenor : str, optional
Flat credit spread tenor on credit curve used during pricing to source credit
spread value. Applicable for Convertible Bonds.
fx_stock_correlation : float, optional
Correlation rate between underlying stock price and FX rate. Applicable for
cross-currency Convertible Bonds.
fx_volatility_percent : float, optional
FX volatility rate percent. Applicable for cross-currency Convertible Bonds.
fx_volatility_tenor : str, optional
Tenor on FX volatility to source FX volatility Rate Percent. Applicable for
cross-currency Convertible Bonds.
gov_country_benchmark_curve_price : float, optional
Price of government country benchmark to override and that will be used to
compute user defined spread. No override is applied by default and price is
computed or retrieved from market data.
gov_country_benchmark_curve_yield_percent : float, optional
Yield of government country benchmark to override and that will be used to
compute government country spread. No override is applied by default
and yield is computed or retrieved from market data.
gov_country_spread_bp : float, optional
Spread of government country benchmark to override and that will be used as
pricing analysis input to compute the bond price. Optional. No override is
applied by default. Note that only one pricing analysis input should be defined.
government_benchmark_curve_price : float, optional
Price of government benchmark to override and that will be used to compute user
defined spread. No override is applied by default and price is
computed or retrieved from market data.
government_benchmark_curve_yield_percent : float, optional
Yield of government benchmark to override and that will be used to compute
government spread. No override is applied by default and yield is
computed or retrieved from market data.
government_spread_bp : float, optional
Spread of government benchmark to override and that will be used as pricing
analysis input to compute the bond price. No override is applied by
default. Note that only one pricing analysis input should be defined.
issuer_benchmark_curve_yield_percent : float, optional
Yield of issuer benchmark to override and that will be used to compute issuer
spread. No override is applied by default and yield is computed or retrieved
from market data.
issuer_spread_bp : float, optional
Spread of issuer benchmark to override and that will be used as pricing analysis
        input to compute the bond price. This spread is computed for corporate bonds.
        Optional. No override is applied by default. Note that only one pricing analysis
input should be defined.
market_data_date : str or date or datetime or timedelta, optional
The market data date for pricing.
By default, the market_data_date date is the valuation_date or Today
market_value_in_deal_ccy : float, optional
Market value in deal currency. This field can be used to compute notionalAmount
to apply to get this market value. Optional. By default the value is computed
from notional amount. NotionalAmount field, market_value_in_deal_ccy field and
market_value_in_report_ccy field cannot be set at defined at the same time.
market_value_in_report_ccy : float, optional
Market value in report currency. This field can be used to compute
notionalAmount to apply to get this market value. By default the value
is computed from notional amount. NotionalAmount field, market_value_in_deal_ccy
field and market_value_in_report_ccy field cannot be set at defined at the same
time.
net_price : float, optional
Net price to override and that will be used as pricing analysis input.
No override is applied by default. Note that only one pricing anlysis input
should be defined.
neutral_yield_percent : float, optional
Neutral Yield (expressed in percent) to override and that will be used as
pricing analysis input. This is available only for floating rate notes.
No override is applied by default. Note that only one pricing analysis
input should be defined.
ois_zc_benchmark_curve_yield_percent : float, optional
Yield of OIS benchmark to override and that will be used to compute OIS spread.
No override is applied by default and yield is computed or retrieved from market
data.
ois_zc_spread_bp : float, optional
Yield of OIS benchmark to override and that will be used as pricing analysis
input to compute the bond price. No override is applied by default.
Note that only one pricing analysis input should be defined.
option_adjusted_spread_bp : float, optional
Option Adjusted Spread to override and that will be used as pricing analysis
input to compute the bond price. No override is applied by default.
Note that only one pricing analysis input should be defined.
price : float, optional
Price to override and that will be used as pricing analysis input. This price
can be the clean price or dirty price depending on price type defined in bond
structure. The currency of the price is the cash flow currency (that can be
different to deal currency especially if "ComputeCashFlowWithReportCcy" flag has
been set to true). Optional. No override is applied by default. Note that only
one pricing analysis input should be defined.
quoted_price : float, optional
Quoted price to override and that will be used as pricing analysis input. Note
that a quoted price can be a price, a yield, a discount margin, a spread,...
        depending on quotation type. The currency of the quoted price in case the bond
is price-quoted or cash-quoted is the deal currency (that can be different to
cash flow currency especially if "ComputeCashFlowWithReportCcy" flag has been
set to true). No override is applied by default. Note that only one pricing
analysis input should be defined.
rating_benchmark_curve_yield_percent : float, optional
Yield of rating benchmark to override and that will be used to compute rating
spread. No override is applied by default and yield is computed or retrieved
from market data.
rating_spread_bp : float, optional
Spread of rating benchmark to override and that will be used as pricing analysis
input to compute the bond price. No override is applied by default.
        Note that only one pricing analysis input should be defined.
redemption_date : str or date or datetime or timedelta, optional
Redemption date that defines the end date for yield and price computation. Used
only if redemption date type is set to "RedemptionAtCustomDate"
sector_rating_benchmark_curve_yield_percent : float, optional
Yield of sector rating benchmark to override and that will be used to compute
sector rating spread. No override is applied by default and yield is computed
or retrieved from market data.
sector_rating_spread_bp : float, optional
Spread of sector rating benchmark to override and that will be used as pricing
analysis input to compute the bond price. No override is applied by default.
        Note that only one pricing analysis input should be defined.
settlement_convention : str, optional
Settlement convention for the bond. By default the rule is that valuation_date =
trade_date + settlement_convention. By default use the settlement tenor defined
in the bond structure. Only two parameters among "settlement_convention",
"market_data_date" and "valuation_date" can be overriden at the same time.
simple_margin_bp : float, optional
Simple Margin basis points to override and that will be used as pricing
analysis input. Available only for Floating Rate Notes. No override is
        applied by default. Note that only one pricing analysis input should be defined.
stock_borrow_rate_percent : float, optional
Underlying stock borrow rate percent. Applicable for Convertible Bonds.
stock_flat_volatility_percent : float, optional
Underlying stock volatility percent used for convertible pricing. Applicable
when volatility_type = Flat Applicable for Convertible Bonds.
stock_flat_volatility_tenor : str, optional
Underlying Stock volatility tenor used during pricing to source volatility
percent value. Applicable when volatility_type = Flat Applicable for Convertible
Bonds.
stock_price_on_default : float, optional
Assumed stock price agreed in event of default. Applicable for Convertible
Bonds.
strip_yield_percent : float, optional
Strip Yield (expressed in percent) to override and that will be used as pricing
analysis input. No override is applied by default. Note that only one pricing
        analysis input should be defined.
swap_benchmark_curve_yield_percent : float, optional
Yield of swap benchmark to override and that will be used to compute swap
spread. No override is applied by default and yield is computed or
retrieved from market data.
swap_spread_bp : float, optional
Spread of swap benchmark to override and that will be used as pricing analysis
input to compute the bond price. No override is applied by default.
Note that only one pricing analysis input should be defined.
tax_on_capital_gain_percent : float, optional
Tax Rate on capital gain expressed in percent.
By default no tax is applied that means value is equal to 0.
tax_on_coupon_percent : float, optional
Tax Rate on Coupon expressed in percent.
By default no tax is applied that means value is equal to 0.
tax_on_price_percent : float, optional
Tax Rate on price expressed in percent.
By default no tax is applied that means value is equal to 0.
tax_on_yield_percent : float, optional
Tax Rate on Yield expressed in percent. Also named Tax on Yield Optional.
By default no tax is applied that means value is equal to 0.
use_settlement_date_from_quote : bool, optional
        Specify whether to use the settlement date of the quote or the one computed from
the MarketData Date.
user_defined_benchmark_price : float, optional
price of user defined instrument to override and that will be used to compute
user defined spread. No override is applied by default and price is computed
or retrieved from market data.
user_defined_benchmark_yield_percent : float, optional
Yield of user defined instrument to override and that will be used to compute
user defined spread. No override is applied by default and yield is computed
or retrieved from market data.
user_defined_spread_bp : float, optional
Spread of user defined instrument to override and that will be used as pricing
analysis input to compute the bond price. No override is applied by default.
Note that only one pricing analysis input should be defined.
valuation_date : str or date or datetime or timedelta, optional
The valuation date for pricing. If not set the valuation date is equal
to market_data_date or Today. For assets that contains a settlement_convention,
the default valuation date is equal to the settlementdate of the Asset that
is usually the trade_date+settlement_convention.
yield_percent : float, optional
Yield (expressed in percent) to override and that will be used as pricing
analysis input. No override is applied by default. Note that only one pricing
analysis input should be defined.
z_spread_bp : float, optional
ZSpread to override and that will be used as pricing analysis input to compute
the bond price. No override is applied by default. Note that only one pricing
analysis input should be defined.
Examples
--------
>>> import refinitiv.data.content.ipa.financial_contracts as rdf
>>> definition = rdf.bond.Definition(
... instrument_code="US5YT=RR",
... payment_business_day_convention=rdf.bond.BusinessDayConvention.PREVIOUS_BUSINESS_DAY,
... pricing_parameters=rdf.bond.PricingParameters(
... benchmark_yield_selection_mode=rdf.bond.BenchmarkYieldSelectionMode.INTERPOLATE
... ),
... fields=["InstrumentDescription", "MarketDataDate", "Price", "YieldPercent", "ZSpreadBp"]
...)
>>> response = definition.get_data()
"""
def __init__(
self,
trade_date: "OptDateTime" = None,
benchmark_yield_selection_mode: Union[BenchmarkYieldSelectionMode, str] = None,
credit_spread_type: Union[CreditSpreadType, str] = None,
dividend_type: Union[DividendType, str] = None,
fx_price_side: Union[PriceSide, str] = None,
inflation_mode: Union[InflationMode, str] = None,
price_side: Union[PriceSide, str] = None,
projected_index_calculation_method: Union[ProjectedIndexCalculationMethod, str] = None,
quote_fallback_logic: Union[QuoteFallbackLogic, str] = None,
redemption_date_type: Union[RedemptionDateType, str] = None,
rounding_parameters: Union[BondRoundingParameters, dict] = None,
volatility_term_structure_type: Union[VolatilityTermStructureType, str] = None,
volatility_type: Union[VolatilityType, str] = None,
yield_type: Union[YieldType, str] = None,
adjusted_clean_price: Optional[float] = None,
adjusted_dirty_price: Optional[float] = None,
adjusted_yield_percent: Optional[float] = None,
apply_tax_to_full_pricing: Optional[bool] = None,
asset_swap_spread_bp: Optional[float] = None,
benchmark_at_issue_price: Optional[float] = None,
benchmark_at_issue_ric: Optional[str] = None,
benchmark_at_issue_spread_bp: Optional[float] = None,
benchmark_at_issue_yield_percent: Optional[float] = None,
benchmark_at_redemption_price: Optional[float] = None,
benchmark_at_redemption_spread_bp: Optional[float] = None,
benchmark_at_redemption_yield_percent: Optional[float] = None,
bond_recovery_rate_percent: Optional[float] = None,
cash_amount: Optional[float] = None,
cds_recovery_rate_percent: Optional[float] = None,
clean_price: Optional[float] = None,
compute_cash_flow_from_issue_date: Optional[bool] = None,
compute_cash_flow_with_report_ccy: Optional[bool] = None,
concession_fee: Optional[float] = None,
current_yield_percent: Optional[float] = None,
dirty_price: Optional[float] = None,
discount_margin_bp: Optional[float] = None,
discount_percent: Optional[float] = None,
dividend_yield_percent: Optional[float] = None,
edsf_benchmark_curve_yield_percent: Optional[float] = None,
edsf_spread_bp: Optional[float] = None,
efp_benchmark_price: Optional[float] = None,
efp_benchmark_ric: Optional[str] = None,
efp_benchmark_yield_percent: Optional[float] = None,
efp_spread_bp: Optional[float] = None,
flat_credit_spread_bp: Optional[float] = None,
flat_credit_spread_tenor: Optional[str] = None,
fx_stock_correlation: Optional[float] = None,
fx_volatility_percent: Optional[float] = None,
fx_volatility_tenor: Optional[str] = None,
gov_country_benchmark_curve_price: Optional[float] = None,
gov_country_benchmark_curve_yield_percent: Optional[float] = None,
gov_country_spread_bp: Optional[float] = None,
government_benchmark_curve_price: Optional[float] = None,
government_benchmark_curve_yield_percent: Optional[float] = None,
government_spread_bp: Optional[float] = None,
is_coupon_payment_adjustedfor_leap_year: Optional[bool] = None,
issuer_benchmark_curve_yield_percent: Optional[float] = None,
issuer_spread_bp: Optional[float] = None,
market_data_date: "OptDateTime" = None,
market_value_in_deal_ccy: Optional[float] = None,
market_value_in_report_ccy: Optional[float] = None,
net_price: Optional[float] = None,
neutral_yield_percent: Optional[float] = None,
next_coupon_rate_percent: Optional[float] = None,
ois_zc_benchmark_curve_yield_percent: Optional[float] = None,
ois_zc_spread_bp: Optional[float] = None,
option_adjusted_spread_bp: Optional[float] = None,
price: Optional[float] = None,
projected_index_percent: Optional[float] = None,
quoted_price: Optional[float] = None,
rating_benchmark_curve_yield_percent: Optional[float] = None,
rating_spread_bp: Optional[float] = None,
redemption_date: "OptDateTime" = None,
report_ccy: Optional[str] = None,
sector_rating_benchmark_curve_yield_percent: Optional[float] = None,
sector_rating_spread_bp: Optional[float] = None,
settlement_convention: Optional[str] = None,
simple_margin_bp: Optional[float] = None,
stock_borrow_rate_percent: Optional[float] = None,
stock_flat_volatility_percent: Optional[float] = None,
stock_flat_volatility_tenor: Optional[str] = None,
stock_price_on_default: Optional[float] = None,
strip_yield_percent: Optional[float] = None,
swap_benchmark_curve_yield_percent: Optional[float] = None,
swap_spread_bp: Optional[float] = None,
tax_on_capital_gain_percent: Optional[float] = None,
tax_on_coupon_percent: Optional[float] = None,
tax_on_price_percent: Optional[float] = None,
tax_on_yield_percent: Optional[float] = None,
use_settlement_date_from_quote: Optional[bool] = None,
user_defined_benchmark_price: Optional[float] = None,
user_defined_benchmark_yield_percent: Optional[float] = None,
user_defined_spread_bp: Optional[float] = None,
valuation_date: "OptDateTime" = None,
yield_percent: Optional[float] = None,
z_spread_bp: Optional[float] = None,
) -> None:
super().__init__()
self.trade_date = trade_date
self.benchmark_yield_selection_mode = benchmark_yield_selection_mode
self.credit_spread_type = credit_spread_type
self.dividend_type = dividend_type
self.fx_price_side = fx_price_side
self.inflation_mode = inflation_mode
self.price_side = price_side
self.projected_index_calculation_method = projected_index_calculation_method
self.quote_fallback_logic = quote_fallback_logic
self.redemption_date_type = redemption_date_type
self.rounding_parameters = rounding_parameters
self.volatility_term_structure_type = volatility_term_structure_type
self.volatility_type = volatility_type
self.yield_type = yield_type
self.adjusted_clean_price = adjusted_clean_price
self.adjusted_dirty_price = adjusted_dirty_price
self.adjusted_yield_percent = adjusted_yield_percent
self.apply_tax_to_full_pricing = apply_tax_to_full_pricing
self.asset_swap_spread_bp = asset_swap_spread_bp
self.benchmark_at_issue_price = benchmark_at_issue_price
self.benchmark_at_issue_ric = benchmark_at_issue_ric
self.benchmark_at_issue_spread_bp = benchmark_at_issue_spread_bp
self.benchmark_at_issue_yield_percent = benchmark_at_issue_yield_percent
self.benchmark_at_redemption_price = benchmark_at_redemption_price
self.benchmark_at_redemption_spread_bp = benchmark_at_redemption_spread_bp
self.benchmark_at_redemption_yield_percent = benchmark_at_redemption_yield_percent
self.bond_recovery_rate_percent = bond_recovery_rate_percent
self.cash_amount = cash_amount
self.cds_recovery_rate_percent = cds_recovery_rate_percent
self.clean_price = clean_price
self.compute_cash_flow_from_issue_date = compute_cash_flow_from_issue_date
self.compute_cash_flow_with_report_ccy = compute_cash_flow_with_report_ccy
self.concession_fee = concession_fee
self.current_yield_percent = current_yield_percent
self.dirty_price = dirty_price
self.discount_margin_bp = discount_margin_bp
self.discount_percent = discount_percent
self.dividend_yield_percent = dividend_yield_percent
self.edsf_benchmark_curve_yield_percent = edsf_benchmark_curve_yield_percent
self.edsf_spread_bp = edsf_spread_bp
self.efp_benchmark_price = efp_benchmark_price
self.efp_benchmark_ric = efp_benchmark_ric
self.efp_benchmark_yield_percent = efp_benchmark_yield_percent
self.efp_spread_bp = efp_spread_bp
self.flat_credit_spread_bp = flat_credit_spread_bp
self.flat_credit_spread_tenor = flat_credit_spread_tenor
self.fx_stock_correlation = fx_stock_correlation
self.fx_volatility_percent = fx_volatility_percent
self.fx_volatility_tenor = fx_volatility_tenor
self.gov_country_benchmark_curve_price = gov_country_benchmark_curve_price
self.gov_country_benchmark_curve_yield_percent = gov_country_benchmark_curve_yield_percent
self.gov_country_spread_bp = gov_country_spread_bp
self.government_benchmark_curve_price = government_benchmark_curve_price
self.government_benchmark_curve_yield_percent = government_benchmark_curve_yield_percent
self.government_spread_bp = government_spread_bp
self.is_coupon_payment_adjustedfor_leap_year = is_coupon_payment_adjustedfor_leap_year
self.issuer_benchmark_curve_yield_percent = issuer_benchmark_curve_yield_percent
self.issuer_spread_bp = issuer_spread_bp
self.market_data_date = market_data_date
self.market_value_in_deal_ccy = market_value_in_deal_ccy
self.market_value_in_report_ccy = market_value_in_report_ccy
self.net_price = net_price
self.neutral_yield_percent = neutral_yield_percent
self.next_coupon_rate_percent = next_coupon_rate_percent
self.ois_zc_benchmark_curve_yield_percent = ois_zc_benchmark_curve_yield_percent
self.ois_zc_spread_bp = ois_zc_spread_bp
self.option_adjusted_spread_bp = option_adjusted_spread_bp
self.price = price
self.projected_index_percent = projected_index_percent
self.quoted_price = quoted_price
self.rating_benchmark_curve_yield_percent = rating_benchmark_curve_yield_percent
self.rating_spread_bp = rating_spread_bp
self.redemption_date = redemption_date
self.report_ccy = report_ccy
self.sector_rating_benchmark_curve_yield_percent = sector_rating_benchmark_curve_yield_percent
self.sector_rating_spread_bp = sector_rating_spread_bp
self.settlement_convention = settlement_convention
self.simple_margin_bp = simple_margin_bp
self.stock_borrow_rate_percent = stock_borrow_rate_percent
self.stock_flat_volatility_percent = stock_flat_volatility_percent
self.stock_flat_volatility_tenor = stock_flat_volatility_tenor
self.stock_price_on_default = stock_price_on_default
self.strip_yield_percent = strip_yield_percent
self.swap_benchmark_curve_yield_percent = swap_benchmark_curve_yield_percent
self.swap_spread_bp = swap_spread_bp
self.tax_on_capital_gain_percent = tax_on_capital_gain_percent
self.tax_on_coupon_percent = tax_on_coupon_percent
self.tax_on_price_percent = tax_on_price_percent
self.tax_on_yield_percent = tax_on_yield_percent
self.use_settlement_date_from_quote = use_settlement_date_from_quote
self.user_defined_benchmark_price = user_defined_benchmark_price
self.user_defined_benchmark_yield_percent = user_defined_benchmark_yield_percent
self.user_defined_spread_bp = user_defined_spread_bp
self.valuation_date = valuation_date
self.yield_percent = yield_percent
self.z_spread_bp = z_spread_bp
@property
def benchmark_yield_selection_mode(self):
    """
    The benchmark yield selection mode:
    - Interpolate : do an interpolation on the yield curve to compute the
      reference yield.
    - Nearest : use the nearest point to find the reference yield.
    Optional. Default value is "Interpolate".
    :return: enum BenchmarkYieldSelectionMode
    """
    return self._get_enum_parameter(BenchmarkYieldSelectionMode, "benchmarkYieldSelectionMode")
@benchmark_yield_selection_mode.setter
def benchmark_yield_selection_mode(self, value):
    self._set_enum_parameter(BenchmarkYieldSelectionMode, "benchmarkYieldSelectionMode", value)
@property
def credit_spread_type(self):
    """
    Credit curve spread type to use during pricing. Applicable for Convertible
    Bonds.
    :return: enum CreditSpreadType
    """
    return self._get_enum_parameter(CreditSpreadType, "creditSpreadType")
@credit_spread_type.setter
def credit_spread_type(self, value):
    self._set_enum_parameter(CreditSpreadType, "creditSpreadType", value)
@property
def dividend_type(self):
    """
    Underlying stock dividend type used when pricing a convertible bond.
    Applicable for Convertible Bonds.
    :return: enum DividendType
    """
    return self._get_enum_parameter(DividendType, "dividendType")
@dividend_type.setter
def dividend_type(self, value):
    self._set_enum_parameter(DividendType, "dividendType", value)
@property
def fx_price_side(self):
    """
    FX price side to consider when retrieving FX rates (Mid, Bid, Ask, Last, Close).
    :return: enum PriceSide
    """
    return self._get_enum_parameter(PriceSide, "fxPriceSide")
@fx_price_side.setter
def fx_price_side(self, value):
    self._set_enum_parameter(PriceSide, "fxPriceSide", value)
@property
def inflation_mode(self):
    """
    The indicator used to define whether instrument parameters should be adjusted
    for inflation or not. Available only for inflation-linked instruments.
    Optional. By default, 'Default' is used, which means the behaviour depends on
    the instrument quotation convention.
    :return: enum InflationMode
    """
    return self._get_enum_parameter(InflationMode, "inflationMode")
@inflation_mode.setter
def inflation_mode(self, value):
    self._set_enum_parameter(InflationMode, "inflationMode", value)
@property
def price_side(self):
    """
    Quoted price side of the bond to use for the pricing analysis: Bid (bid
    value), Ask (ask value), Mid (mid value).
    Optional. By default the "Mid" price of the bond is used.
    :return: enum PriceSide
    """
    return self._get_enum_parameter(PriceSide, "priceSide")
@price_side.setter
def price_side(self, value):
    self._set_enum_parameter(PriceSide, "priceSide", value)
@property
def projected_index_calculation_method(self):
    """
    Flag used to define how the projected index is computed. Available values are:
    - "ConstantIndex" : future index values are considered as constant and equal
      to the projected index value.
    - "ForwardIndex" : future index values are computed using a forward curve.
    Optional. Default value is "ConstantIndex". It is defaulted to "ForwardIndex"
    for Preferreds and Brazilian Debenture bonds.
    :return: enum ProjectedIndexCalculationMethod
    """
    return self._get_enum_parameter(ProjectedIndexCalculationMethod, "projectedIndexCalculationMethod")
@projected_index_calculation_method.setter
def projected_index_calculation_method(self, value):
    self._set_enum_parameter(ProjectedIndexCalculationMethod, "projectedIndexCalculationMethod", value)
@property
def quote_fallback_logic(self):
    """
    Enumeration used to define the fallback logic for the quotation of the
    instrument. Available values are:
    - "None": there is no fallback logic. For example, if the user asks for an
      "Ask" price and the instrument is only quoted with a "Bid" price, it is an
      error case.
    - "BestField": there is a fallback logic that uses another market data field
      as the quoted price. For example, if the user asks for an "Ask" price and
      the instrument is only quoted with a "Bid" price, the "Bid" price can be
      used.
    :return: enum QuoteFallbackLogic
    """
    return self._get_enum_parameter(QuoteFallbackLogic, "quoteFallbackLogic")
@quote_fallback_logic.setter
def quote_fallback_logic(self, value):
    self._set_enum_parameter(QuoteFallbackLogic, "quoteFallbackLogic", value)
@property
def redemption_date_type(self):
    """
    Redemption type of the bond. It is used to compute the default redemption date:
    - RedemptionAtMaturityDate : yield and price are computed at maturity date.
    - RedemptionAtCallDate : yield and price are computed at call date (next call
      date by default).
    - RedemptionAtPutDate : yield and price are computed at put date (next put
      date by default).
    - RedemptionAtWorstDate : yield and price are computed at the lowest yield date.
    - RedemptionAtSinkDate : yield and price are computed at sinking fund date.
    - RedemptionAtParDate : yield and price are computed at next par.
    - RedemptionAtPremiumDate : yield and price are computed at next premium.
    - RedemptionAtMakeWholeCallDate : yield and price are computed at Make Whole
      Call date.
    - RedemptionAtAverageLife : yield and price are computed at average life (case
      of sinkable bonds).
    - RedemptionAtNextDate : yield and price are computed at the next redemption
      date available.
    Optional. Default value is "RedemptionAtWorstDate" for a callable bond,
    "RedemptionAtBestDate" for a puttable bond, or "RedemptionAtMaturityDate".
    :return: enum RedemptionDateType
    """
    return self._get_enum_parameter(RedemptionDateType, "redemptionDateType")
@redemption_date_type.setter
def redemption_date_type(self, value):
    self._set_enum_parameter(RedemptionDateType, "redemptionDateType", value)
@property
def rounding_parameters(self):
    """
    Definition of rounding parameters to be applied on accrued, price or yield.
    Optional. By default, rounding parameters are the ones defined in the bond
    structure.
    :return: object BondRoundingParameters
    """
    return self._get_object_parameter(BondRoundingParameters, "roundingParameters")
@rounding_parameters.setter
def rounding_parameters(self, value):
    self._set_object_parameter(BondRoundingParameters, "roundingParameters", value)
@property
def volatility_term_structure_type(self):
    """
    Stock volatility term structure type to use during pricing. Applicable for
    Convertible Bonds.
    :return: enum VolatilityTermStructureType
    """
    return self._get_enum_parameter(VolatilityTermStructureType, "volatilityTermStructureType")
@volatility_term_structure_type.setter
def volatility_term_structure_type(self, value):
    self._set_enum_parameter(VolatilityTermStructureType, "volatilityTermStructureType", value)
@property
def volatility_type(self):
    """
    Volatility type to use during pricing. Applicable for Convertible Bonds.
    :return: enum VolatilityType
    """
    return self._get_enum_parameter(VolatilityType, "volatilityType")
@volatility_type.setter
def volatility_type(self, value):
    self._set_enum_parameter(VolatilityType, "volatilityType", value)
@property
def yield_type(self):
    """
    YieldType that specifies the rate structure.
    - Native : no specific yield type is defined.
    - UsGovt_Actual_Actual_6M : US Govt Act/Act 6M YTA.
    - Isma_30_360_6M : ISMA 30/360 6M YTA.
    - Euroland_Actual_Actual_6M : Euroland Equivalent Act/Act 6M YTA.
    - Money_Market_Actual_360_6M : Money Market Act/360 6M YTA.
    - Money_Market_Actual_365_6M : Money Market Act/365 6M YTA.
    - Money_Market_Actual_Actual_6M : Money Market Act/Act 6M YTA.
    - Bond_Actual_364_6M : Bond Market Act/364 6M YTA.
    - Japanese_Simple_JAP_6M : Japanese Simple JAP 6M YTA.
    - Japanese_Compunded_30_360_6M : Japanese Compounded 30/360 6M YTA.
    - Moosmueller_30_360_6M : Moosmueller 30/360 6M YTA.
    - Braess_Frangmeyer_30_360_6M : Braess-Frangmeyer 30/360 6M YTA.
    - Weekend_30_360 : Week End 30/360 6M YTA.
    Optional. The default value is Native.
    :return: enum YieldType
    """
    return self._get_enum_parameter(YieldType, "yieldType")
@yield_type.setter
def yield_type(self, value):
    self._set_enum_parameter(YieldType, "yieldType", value)
@property
def adjusted_clean_price(self):
    """
    Inflation-adjusted clean price to override, used as pricing analysis input.
    The currency of the clean price is the cash flow currency (which can differ
    from the deal currency, especially if the "ComputeCashFlowWithReportCcy" flag
    has been set to true). Optional. No override is applied by default. Note
    that only one pricing analysis input should be defined.
    :return: float
    """
    return self._get_parameter("adjustedCleanPrice")
@adjusted_clean_price.setter
def adjusted_clean_price(self, value):
    self._set_parameter("adjustedCleanPrice", value)
@property
def adjusted_dirty_price(self):
    """
    Inflation-adjusted dirty price to override, used as pricing analysis input.
    The currency of the dirty price is the cash flow currency (which can differ
    from the deal currency, especially if the "ComputeCashFlowWithReportCcy" flag
    has been set to true). Optional. No override is applied by default. Note
    that only one pricing analysis input should be defined.
    :return: float
    """
    return self._get_parameter("adjustedDirtyPrice")
@adjusted_dirty_price.setter
def adjusted_dirty_price(self, value):
    self._set_parameter("adjustedDirtyPrice", value)
@property
def adjusted_yield_percent(self):
    """
    Inflation-adjusted yield (expressed in percent) to override, used as pricing
    analysis input. Optional. No override is applied by default.
    Note that only one pricing analysis input should be defined.
    :return: float
    """
    return self._get_parameter("adjustedYieldPercent")
@adjusted_yield_percent.setter
def adjusted_yield_percent(self, value):
    self._set_parameter("adjustedYieldPercent", value)
@property
def apply_tax_to_full_pricing(self):
    """
    Flag that applies these tax parameters to all pricing/schedule/risk/spread
    computations. Optional. By default tax parameters are applied only
    to Munis.
    :return: bool
    """
    return self._get_parameter("applyTaxToFullPricing")
@apply_tax_to_full_pricing.setter
def apply_tax_to_full_pricing(self, value):
    self._set_parameter("applyTaxToFullPricing", value)
@property
def asset_swap_spread_bp(self):
    """
    Asset swap spread to override, used as pricing analysis input to
    compute the bond price. Optional. No override is applied by default. Note
    that only one pricing analysis input should be defined.
    :return: float
    """
    return self._get_parameter("assetSwapSpreadBp")
@asset_swap_spread_bp.setter
def asset_swap_spread_bp(self, value):
    self._set_parameter("assetSwapSpreadBp", value)
@property
def benchmark_at_issue_price(self):
    """
    Price of the benchmark at issue to override, used to compute the
    benchmark at redemption spread. Optional. No override is applied by default
    and the price is computed or retrieved from market data.
    :return: float
    """
    return self._get_parameter("benchmarkAtIssuePrice")
@benchmark_at_issue_price.setter
def benchmark_at_issue_price(self, value):
    self._set_parameter("benchmarkAtIssuePrice", value)
@property
def benchmark_at_issue_ric(self):
    """
    RIC of the benchmark at issue to override, used as pricing analysis
    input to compute the bond price. Optional. No override is applied by default.
    Note that only one pricing analysis input should be defined.
    :return: str
    """
    return self._get_parameter("benchmarkAtIssueRic")
@benchmark_at_issue_ric.setter
def benchmark_at_issue_ric(self, value):
    self._set_parameter("benchmarkAtIssueRic", value)
@property
def benchmark_at_issue_spread_bp(self):
    """
    Spread of the benchmark at issue to override, used as pricing
    analysis input to compute the bond price. Optional. No override is applied by
    default. Note that only one pricing analysis input should be defined.
    :return: float
    """
    return self._get_parameter("benchmarkAtIssueSpreadBp")
@benchmark_at_issue_spread_bp.setter
def benchmark_at_issue_spread_bp(self, value):
    self._set_parameter("benchmarkAtIssueSpreadBp", value)
@property
def benchmark_at_issue_yield_percent(self):
    """
    Yield of the benchmark at issue to override, used to compute the
    benchmark at redemption spread. Optional. No override is applied by default
    and the yield is computed or retrieved from market data.
    :return: float
    """
    return self._get_parameter("benchmarkAtIssueYieldPercent")
@benchmark_at_issue_yield_percent.setter
def benchmark_at_issue_yield_percent(self, value):
    self._set_parameter("benchmarkAtIssueYieldPercent", value)
@property
def benchmark_at_redemption_price(self):
    """
    Price of the benchmark at redemption to override, used to compute the
    benchmark at redemption spread. Optional. No override is applied by default
    and the price is computed or retrieved from market data.
    :return: float
    """
    return self._get_parameter("benchmarkAtRedemptionPrice")
@benchmark_at_redemption_price.setter
def benchmark_at_redemption_price(self, value):
    self._set_parameter("benchmarkAtRedemptionPrice", value)
@property
def benchmark_at_redemption_spread_bp(self):
    """
    Spread of the benchmark at redemption to override, used as pricing
    analysis input to compute the bond price. Optional. No override is applied by
    default. Note that only one pricing analysis input should be defined.
    :return: float
    """
    return self._get_parameter("benchmarkAtRedemptionSpreadBp")
@benchmark_at_redemption_spread_bp.setter
def benchmark_at_redemption_spread_bp(self, value):
    self._set_parameter("benchmarkAtRedemptionSpreadBp", value)
@property
def benchmark_at_redemption_yield_percent(self):
    """
    Yield of the benchmark at redemption to override, used to compute the
    benchmark at redemption spread. Optional. No override is applied by default
    and the yield is computed or retrieved from market data.
    :return: float
    """
    return self._get_parameter("benchmarkAtRedemptionYieldPercent")
@benchmark_at_redemption_yield_percent.setter
def benchmark_at_redemption_yield_percent(self, value):
    self._set_parameter("benchmarkAtRedemptionYieldPercent", value)
@property
def bond_recovery_rate_percent(self):
    """
    Bond recovery rate percent set for a convertible bond. Applicable for
    Convertible Bonds.
    :return: float
    """
    return self._get_parameter("bondRecoveryRatePercent")
@bond_recovery_rate_percent.setter
def bond_recovery_rate_percent(self, value):
    self._set_parameter("bondRecoveryRatePercent", value)
@property
def cash_amount(self):
    """
    Cash amount to override, used as pricing analysis input.
    Optional. No override is applied by default. Note that only one pricing
    analysis input should be defined.
    :return: float
    """
    return self._get_parameter("cashAmount")
@cash_amount.setter
def cash_amount(self, value):
    self._set_parameter("cashAmount", value)
@property
def cds_recovery_rate_percent(self):
    """
    Recovery rate percent used in the credit curve related to the convertible.
    Applicable for Convertible Bonds.
    :return: float
    """
    return self._get_parameter("cdsRecoveryRatePercent")
@cds_recovery_rate_percent.setter
def cds_recovery_rate_percent(self, value):
    self._set_parameter("cdsRecoveryRatePercent", value)
@property
def clean_price(self):
    """
    Clean price to override, used as pricing analysis input. The
    currency of the clean price is the cash flow currency (which can differ from
    the deal currency, especially if the "ComputeCashFlowWithReportCcy" flag has
    been set to true). Optional. No override is applied by default. Note that
    only one pricing analysis input should be defined.
    :return: float
    """
    return self._get_parameter("cleanPrice")
@clean_price.setter
def clean_price(self, value):
    self._set_parameter("cleanPrice", value)
@property
def compute_cash_flow_from_issue_date(self):
    """
    The indicator that defines the date from which the cash flows will be
    computed. The possible values are:
    - true: from issue date,
    - false: from trade date.
    Optional. Default value is 'false'.
    :return: bool
    """
    return self._get_parameter("computeCashFlowFromIssueDate")
@compute_cash_flow_from_issue_date.setter
def compute_cash_flow_from_issue_date(self, value):
    self._set_parameter("computeCashFlowFromIssueDate", value)
@property
def compute_cash_flow_with_report_ccy(self):
    """
    The indicator used to express the instrument cash flows in the report
    currency. The possible values are:
    - true: the pricing will be done in the reporting currency using an FX
      forward curve,
    - false: the pricing will be done using the notional currency.
    Optional. Default value is 'false'.
    :return: bool
    """
    return self._get_parameter("computeCashFlowWithReportCcy")
@compute_cash_flow_with_report_ccy.setter
def compute_cash_flow_with_report_ccy(self, value):
    self._set_parameter("computeCashFlowWithReportCcy", value)
@property
def concession_fee(self):
    """
    Fee to apply to the bond price; it is expressed in the same unit as the bond
    price (percent or cash).
    :return: float
    """
    return self._get_parameter("concessionFee")
@concession_fee.setter
def concession_fee(self, value):
    self._set_parameter("concessionFee", value)
@property
def current_yield_percent(self):
    """
    Current yield (expressed in percent) to override, used as
    pricing analysis input. Optional. No override is applied by default. Note
    that only one pricing analysis input should be defined.
    :return: float
    """
    return self._get_parameter("currentYieldPercent")
@current_yield_percent.setter
def current_yield_percent(self, value):
    self._set_parameter("currentYieldPercent", value)
@property
def dirty_price(self):
    """
    Dirty price to override, used as pricing analysis input. The
    currency of the dirty price is the cash flow currency (which can differ from
    the deal currency, especially if the "ComputeCashFlowWithReportCcy" flag has
    been set to true). Optional. No override is applied by default. Note that
    only one pricing analysis input should be defined.
    :return: float
    """
    return self._get_parameter("dirtyPrice")
@dirty_price.setter
def dirty_price(self, value):
    self._set_parameter("dirtyPrice", value)
@property
def discount_margin_bp(self):
    """
    Discount margin basis points to override, used as pricing
    analysis input. Available only for Floating Rate Notes. Optional. No override
    is applied by default. Note that only one pricing analysis input should be
    defined.
    :return: float
    """
    return self._get_parameter("discountMarginBp")
@discount_margin_bp.setter
def discount_margin_bp(self, value):
    self._set_parameter("discountMarginBp", value)
@property
def discount_percent(self):
    """
    Discount (expressed in percent) to override, used as pricing
    analysis input. Should be used only for bonds quoted in discount. Optional.
    No override is applied by default. Note that only one pricing analysis input
    should be defined.
    :return: float
    """
    return self._get_parameter("discountPercent")
@discount_percent.setter
def discount_percent(self, value):
    self._set_parameter("discountPercent", value)
@property
def dividend_yield_percent(self):
    """
    Underlying stock dividend yield percent. Applicable for Convertible Bonds.
    :return: float
    """
    return self._get_parameter("dividendYieldPercent")
@dividend_yield_percent.setter
def dividend_yield_percent(self, value):
    self._set_parameter("dividendYieldPercent", value)
@property
def edsf_benchmark_curve_yield_percent(self):
    """
    Yield of the Euro-Dollar future benchmark curve (Edsf) to override, used
    to compute the Euro-Dollar (Edsf) spread. Optional. No override is applied by
    default and the yield is computed or retrieved from market data.
    :return: float
    """
    return self._get_parameter("edsfBenchmarkCurveYieldPercent")
@edsf_benchmark_curve_yield_percent.setter
def edsf_benchmark_curve_yield_percent(self, value):
    self._set_parameter("edsfBenchmarkCurveYieldPercent", value)
@property
def edsf_spread_bp(self):
    """
    Spread of the Euro-Dollar future benchmark curve (Edsf) to override, used
    as pricing analysis input to compute the bond price. This spread is
    computed for USD bonds whose maturity is under 2 years. Optional. No override
    is applied by default. Note that only one pricing analysis input should be
    defined.
    :return: float
    """
    return self._get_parameter("edsfSpreadBp")
@edsf_spread_bp.setter
def edsf_spread_bp(self, value):
    self._set_parameter("edsfSpreadBp", value)
@property
def efp_benchmark_price(self):
    """
    Price of the EFP benchmark to override, used to compute the benchmark at
    redemption spread in case the bond is an Australian FRN. Optional. No
    override is applied by default and the price is computed or retrieved from
    market data.
    :return: float
    """
    return self._get_parameter("efpBenchmarkPrice")
@efp_benchmark_price.setter
def efp_benchmark_price(self, value):
    self._set_parameter("efpBenchmarkPrice", value)
@property
def efp_benchmark_ric(self):
    """
    RIC of the EFP benchmark to override, used as pricing analysis input
    to compute the bond price in case the bond is an Australian FRN. The RIC can
    be only "YTTc1" or "YTCc1". Optional. Default value is "YTTc1".
    :return: str
    """
    return self._get_parameter("efpBenchmarkRic")
@efp_benchmark_ric.setter
def efp_benchmark_ric(self, value):
    self._set_parameter("efpBenchmarkRic", value)
@property
def efp_benchmark_yield_percent(self):
    """
    Yield of the EFP benchmark to override, used to compute the benchmark at
    redemption spread in case the bond is an Australian FRN. Optional. No
    override is applied by default and the yield is computed or retrieved from
    market data.
    :return: float
    """
    return self._get_parameter("efpBenchmarkYieldPercent")
@efp_benchmark_yield_percent.setter
def efp_benchmark_yield_percent(self, value):
    self._set_parameter("efpBenchmarkYieldPercent", value)
@property
def efp_spread_bp(self):
    """
    Spread of the EFP benchmark to override, used as pricing analysis
    input to compute the bond price in case the bond is an Australian FRN.
    Optional. No override is applied by default. Note that only one pricing
    analysis input should be defined.
    :return: float
    """
    return self._get_parameter("efpSpreadBp")
@efp_spread_bp.setter
def efp_spread_bp(self, value):
    self._set_parameter("efpSpreadBp", value)
@property
def flat_credit_spread_bp(self):
    """
    Flat credit spread applied during pricing, in basis points. Applicable when
    SpreadType = FlatSpread. Applicable for Convertible Bonds.
    :return: float
    """
    return self._get_parameter("flatCreditSpreadBp")
@flat_credit_spread_bp.setter
def flat_credit_spread_bp(self, value):
    self._set_parameter("flatCreditSpreadBp", value)
@property
def flat_credit_spread_tenor(self):
    """
    Flat credit spread tenor on the credit curve used during pricing to source
    the credit spread value. Applicable for Convertible Bonds.
    :return: str
    """
    return self._get_parameter("flatCreditSpreadTenor")
@flat_credit_spread_tenor.setter
def flat_credit_spread_tenor(self, value):
    self._set_parameter("flatCreditSpreadTenor", value)
@property
def fx_stock_correlation(self):
    """
    Correlation rate between the underlying stock price and the FX rate.
    Applicable for cross-currency Convertible Bonds.
    :return: float
    """
    return self._get_parameter("fxStockCorrelation")
@fx_stock_correlation.setter
def fx_stock_correlation(self, value):
    self._set_parameter("fxStockCorrelation", value)
@property
def fx_volatility_percent(self):
    """
    FX volatility rate percent. Applicable for cross-currency Convertible Bonds.
    :return: float
    """
    return self._get_parameter("fxVolatilityPercent")
@fx_volatility_percent.setter
def fx_volatility_percent(self, value):
    self._set_parameter("fxVolatilityPercent", value)
@property
def fx_volatility_tenor(self):
    """
    Tenor on the FX volatility surface used to source the FX volatility rate
    percent. Applicable for cross-currency Convertible Bonds.
    :return: str
    """
    return self._get_parameter("fxVolatilityTenor")
@fx_volatility_tenor.setter
def fx_volatility_tenor(self, value):
    self._set_parameter("fxVolatilityTenor", value)
@property
def gov_country_benchmark_curve_price(self):
    """
    Price of the government country benchmark to override, used to
    compute the user-defined spread. Optional. No override is applied by default
    and the price is computed or retrieved from market data.
    :return: float
    """
    return self._get_parameter("govCountryBenchmarkCurvePrice")
@gov_country_benchmark_curve_price.setter
def gov_country_benchmark_curve_price(self, value):
    self._set_parameter("govCountryBenchmarkCurvePrice", value)
@property
def gov_country_benchmark_curve_yield_percent(self):
    """
    Yield of the government country benchmark to override, used to
    compute the government country spread. Optional. No override is applied by
    default and the yield is computed or retrieved from market data.
    :return: float
    """
    return self._get_parameter("govCountryBenchmarkCurveYieldPercent")
@gov_country_benchmark_curve_yield_percent.setter
def gov_country_benchmark_curve_yield_percent(self, value):
    self._set_parameter("govCountryBenchmarkCurveYieldPercent", value)
@property
def gov_country_spread_bp(self):
    """
    Spread of the government country benchmark to override, used as
    pricing analysis input to compute the bond price. Optional. No override is
    applied by default. Note that only one pricing analysis input should be
    defined.
    :return: float
    """
    return self._get_parameter("govCountrySpreadBp")
@gov_country_spread_bp.setter
def gov_country_spread_bp(self, value):
    self._set_parameter("govCountrySpreadBp", value)
@property
def government_benchmark_curve_price(self):
    """
    Price of the government benchmark to override, used to compute the
    user-defined spread. Optional. No override is applied by default and the
    price is computed or retrieved from market data.
    :return: float
    """
    return self._get_parameter("governmentBenchmarkCurvePrice")
@government_benchmark_curve_price.setter
def government_benchmark_curve_price(self, value):
    self._set_parameter("governmentBenchmarkCurvePrice", value)
@property
def government_benchmark_curve_yield_percent(self):
    """
    Yield of the government benchmark to override, used to compute the
    government spread. Optional. No override is applied by default and the yield
    is computed or retrieved from market data.
    :return: float
    """
    return self._get_parameter("governmentBenchmarkCurveYieldPercent")
@government_benchmark_curve_yield_percent.setter
def government_benchmark_curve_yield_percent(self, value):
    self._set_parameter("governmentBenchmarkCurveYieldPercent", value)
@property
def government_spread_bp(self):
    """
    Spread of the government benchmark to override, used as pricing
    analysis input to compute the bond price. Optional. No override is applied by
    default. Note that only one pricing analysis input should be defined.
    :return: float
    """
    return self._get_parameter("governmentSpreadBp")
@government_spread_bp.setter
def government_spread_bp(self, value):
    self._set_parameter("governmentSpreadBp", value)
@property
def is_coupon_payment_adjustedfor_leap_year(self):
    """
    An indicator of whether a fixed coupon market convention with 365.25 days in
    a year is used to calculate yield and margin. It can be requested if
    projected_index_calculation_method = "ConstantCouponPayment". The possible
    values are:
    - true: a fixed coupon market convention is used,
    - false: a fixed coupon market convention is not used.
    :return: bool
    """
    return self._get_parameter("isCouponPaymentAdjustedforLeapYear")
@is_coupon_payment_adjustedfor_leap_year.setter
def is_coupon_payment_adjustedfor_leap_year(self, value):
    self._set_parameter("isCouponPaymentAdjustedforLeapYear", value)
@property
def issuer_benchmark_curve_yield_percent(self):
    """
    Yield of the issuer benchmark to override; it will be used to compute the
    issuer spread. Optional. No override is applied by default and the yield is
    computed or retrieved from market data.
    :return: float
    """
    return self._get_parameter("issuerBenchmarkCurveYieldPercent")
@issuer_benchmark_curve_yield_percent.setter
def issuer_benchmark_curve_yield_percent(self, value):
    self._set_parameter("issuerBenchmarkCurveYieldPercent", value)
@property
def issuer_spread_bp(self):
    """
    Spread of the issuer benchmark to override; it will be used as pricing
    analysis input to compute the bond price. This spread is computed for
    corporate bonds. Optional. No override is applied by default. Note that only
    one pricing analysis input should be defined.
    :return: float
    """
    return self._get_parameter("issuerSpreadBp")
@issuer_spread_bp.setter
def issuer_spread_bp(self, value):
    self._set_parameter("issuerSpreadBp", value)
@property
def market_data_date(self):
    """
    The market data date for pricing. Optional. By default, the market_data_date
    is the valuation_date or Today.
    :return: str
    """
    return self._get_parameter("marketDataDate")
@market_data_date.setter
def market_data_date(self, value):
    # Dates go through the datetime setter so str/date/datetime inputs are normalized.
    self._set_datetime_parameter("marketDataDate", value)
@property
def market_value_in_deal_ccy(self):
    """
    Market value in deal currency. This field can be used to compute the
    notionalAmount that yields this market value. Optional. By default the value
    is computed from the notional amount. The notionalAmount field,
    market_value_in_deal_ccy field and market_value_in_report_ccy field cannot be
    defined at the same time.
    :return: float
    """
    return self._get_parameter("marketValueInDealCcy")
@market_value_in_deal_ccy.setter
def market_value_in_deal_ccy(self, value):
    self._set_parameter("marketValueInDealCcy", value)
@property
def market_value_in_report_ccy(self):
    """
    Market value in report currency. This field can be used to compute the
    notionalAmount that yields this market value. Optional. By default the value
    is computed from the notional amount. The notionalAmount field,
    market_value_in_deal_ccy field and market_value_in_report_ccy field cannot be
    defined at the same time.
    :return: float
    """
    return self._get_parameter("marketValueInReportCcy")
@market_value_in_report_ccy.setter
def market_value_in_report_ccy(self, value):
    self._set_parameter("marketValueInReportCcy", value)
@property
def net_price(self):
    """
    Net price to override; it will be used as pricing analysis input. Optional.
    No override is applied by default. Note that only one pricing analysis input
    should be defined.
    :return: float
    """
    return self._get_parameter("netPrice")
@net_price.setter
def net_price(self, value):
    self._set_parameter("netPrice", value)
@property
def neutral_yield_percent(self):
    """
    Neutral Yield (expressed in percent) to override; it will be used as pricing
    analysis input. This is available only for floating rate notes. Optional. No
    override is applied by default. Note that only one pricing analysis input
    should be defined.
    :return: float
    """
    return self._get_parameter("neutralYieldPercent")
@neutral_yield_percent.setter
def neutral_yield_percent(self, value):
    self._set_parameter("neutralYieldPercent", value)
@property
def next_coupon_rate_percent(self):
    """
    The user-defined current coupon in case of a FRN bond. Optional. By default
    the current coupon is computed from the current index.
    :return: float
    """
    return self._get_parameter("nextCouponRatePercent")
@next_coupon_rate_percent.setter
def next_coupon_rate_percent(self, value):
    self._set_parameter("nextCouponRatePercent", value)
@property
def ois_zc_benchmark_curve_yield_percent(self):
    """
    Yield of the OIS benchmark to override; it will be used to compute the OIS
    spread. Optional. No override is applied by default and the yield is computed
    or retrieved from market data.
    :return: float
    """
    return self._get_parameter("oisZcBenchmarkCurveYieldPercent")
@ois_zc_benchmark_curve_yield_percent.setter
def ois_zc_benchmark_curve_yield_percent(self, value):
    self._set_parameter("oisZcBenchmarkCurveYieldPercent", value)
@property
def ois_zc_spread_bp(self):
    """
    Spread over the OIS benchmark to override; it will be used as pricing
    analysis input to compute the bond price. Optional. No override is applied by
    default. Note that only one pricing analysis input should be defined.
    :return: float
    """
    return self._get_parameter("oisZcSpreadBp")
@ois_zc_spread_bp.setter
def ois_zc_spread_bp(self, value):
    self._set_parameter("oisZcSpreadBp", value)
@property
def option_adjusted_spread_bp(self):
    """
    Option Adjusted Spread to override; it will be used as pricing analysis
    input to compute the bond price. Optional. No override is applied by default.
    Note that only one pricing analysis input should be defined.
    :return: float
    """
    return self._get_parameter("optionAdjustedSpreadBp")
@option_adjusted_spread_bp.setter
def option_adjusted_spread_bp(self, value):
    self._set_parameter("optionAdjustedSpreadBp", value)
@property
def price(self):
    """
    Price to override; it will be used as pricing analysis input. This price
    can be the clean price or dirty price depending on the price type defined in
    the bond structure. The currency of the price is the cash flow currency (that
    can be different from the deal currency, especially if the
    "ComputeCashFlowWithReportCcy" flag has been set to true). Optional. No
    override is applied by default. Note that only one pricing analysis input
    should be defined.
    :return: float
    """
    return self._get_parameter("price")
@price.setter
def price(self, value):
    self._set_parameter("price", value)
@property
def projected_index_percent(self):
    """
    The projected index rate value used for calculation of future cash flows of
    the floating rate instrument. Usually the projected index is the last known
    value of the index. The value is expressed in percentage. This parameter can
    be used if the parameter projectedindexcalculationmethod is set to
    constantindex. Optional. By default, the projected index rate value is
    computed from the market data according to the instrument convention.
    :return: float
    """
    return self._get_parameter("projectedIndexPercent")
@projected_index_percent.setter
def projected_index_percent(self, value):
    self._set_parameter("projectedIndexPercent", value)
@property
def quoted_price(self):
    """
    Quoted price to override; it will be used as pricing analysis input. Note
    that a quoted price can be a price, a yield, a discount margin, a spread,...
    depending on the quotation type. When the bond is price-quoted or
    cash-quoted, the currency of the quoted price is the deal currency (that can
    be different from the cash flow currency, especially if the
    "ComputeCashFlowWithReportCcy" flag has been set to true). Optional. No
    override is applied by default. Note that only one pricing analysis input
    should be defined.
    :return: float
    """
    return self._get_parameter("quotedPrice")
@quoted_price.setter
def quoted_price(self, value):
    self._set_parameter("quotedPrice", value)
@property
def rating_benchmark_curve_yield_percent(self):
    """
    Yield of the rating benchmark to override; it will be used to compute the
    rating spread. Optional. No override is applied by default and the yield is
    computed or retrieved from market data.
    :return: float
    """
    return self._get_parameter("ratingBenchmarkCurveYieldPercent")
@rating_benchmark_curve_yield_percent.setter
def rating_benchmark_curve_yield_percent(self, value):
    self._set_parameter("ratingBenchmarkCurveYieldPercent", value)
@property
def rating_spread_bp(self):
    """
    Spread of the rating benchmark to override; it will be used as pricing
    analysis input to compute the bond price. Optional. No override is applied by
    default. Note that only one pricing analysis input should be defined.
    :return: float
    """
    return self._get_parameter("ratingSpreadBp")
@rating_spread_bp.setter
def rating_spread_bp(self, value):
    self._set_parameter("ratingSpreadBp", value)
@property
def redemption_date(self):
    """
    Redemption date that defines the end date for yield and price computation.
    Used only if the redemption date type is set to "RedemptionAtCustomDate".
    :return: str
    """
    return self._get_parameter("redemptionDate")
@redemption_date.setter
def redemption_date(self, value):
    # Dates go through the datetime setter so str/date/datetime inputs are normalized.
    self._set_datetime_parameter("redemptionDate", value)
@property
def report_ccy(self):
    """
    The reporting currency code, expressed in ISO 4217 alphabetical format
    (e.g., 'USD'). It is used for the fields ending with 'xxxinreportccy'.
    Optional. The default value is the notional currency.
    :return: str
    """
    return self._get_parameter("reportCcy")
@report_ccy.setter
def report_ccy(self, value):
    self._set_parameter("reportCcy", value)
@property
def sector_rating_benchmark_curve_yield_percent(self):
    """
    Yield of the sector rating benchmark to override; it will be used to compute
    the sector rating spread. Optional. No override is applied by default and the
    yield is computed or retrieved from market data.
    :return: float
    """
    return self._get_parameter("sectorRatingBenchmarkCurveYieldPercent")
@sector_rating_benchmark_curve_yield_percent.setter
def sector_rating_benchmark_curve_yield_percent(self, value):
    self._set_parameter("sectorRatingBenchmarkCurveYieldPercent", value)
@property
def sector_rating_spread_bp(self):
    """
    Spread of the sector rating benchmark to override; it will be used as
    pricing analysis input to compute the bond price. Optional. No override is
    applied by default. Note that only one pricing analysis input should be
    defined.
    :return: float
    """
    return self._get_parameter("sectorRatingSpreadBp")
@sector_rating_spread_bp.setter
def sector_rating_spread_bp(self, value):
    self._set_parameter("sectorRatingSpreadBp", value)
@property
def settlement_convention(self):
    """
    Settlement convention for the bond. By default the rule is that
    valuation_date = trade_date + settlement_convention. Optional. By default the
    settlement tenor defined in the bond structure is used. Only two parameters
    among "settlement_convention", "market_data_date" and "valuation_date" can be
    overridden at the same time.
    :return: str
    """
    return self._get_parameter("settlementConvention")
@settlement_convention.setter
def settlement_convention(self, value):
    self._set_parameter("settlementConvention", value)
@property
def simple_margin_bp(self):
    """
    Simple Margin basis points to override; it will be used as pricing analysis
    input. Available only for Floating Rate Notes. Optional. No override is
    applied by default. Note that only one pricing analysis input should be
    defined.
    :return: float
    """
    return self._get_parameter("simpleMarginBp")
@simple_margin_bp.setter
def simple_margin_bp(self, value):
    self._set_parameter("simpleMarginBp", value)
@property
def stock_borrow_rate_percent(self):
    """
    Underlying stock borrow rate, in percent. Applicable for Convertible Bonds.
    :return: float
    """
    return self._get_parameter("stockBorrowRatePercent")
@stock_borrow_rate_percent.setter
def stock_borrow_rate_percent(self, value):
    self._set_parameter("stockBorrowRatePercent", value)
@property
def stock_flat_volatility_percent(self):
    """
    Underlying stock volatility (in percent) used for convertible pricing.
    Applicable when volatility_type = Flat. Applicable for Convertible Bonds.
    :return: float
    """
    return self._get_parameter("stockFlatVolatilityPercent")
@stock_flat_volatility_percent.setter
def stock_flat_volatility_percent(self, value):
    self._set_parameter("stockFlatVolatilityPercent", value)
@property
def stock_flat_volatility_tenor(self):
    """
    Underlying stock volatility tenor used during pricing to source the
    volatility percent value. Applicable when volatility_type = Flat. Applicable
    for Convertible Bonds.
    :return: str
    """
    return self._get_parameter("stockFlatVolatilityTenor")
@stock_flat_volatility_tenor.setter
def stock_flat_volatility_tenor(self, value):
    self._set_parameter("stockFlatVolatilityTenor", value)
@property
def stock_price_on_default(self):
    """
    Assumed stock price agreed in the event of default. Applicable for
    Convertible Bonds.
    :return: float
    """
    return self._get_parameter("stockPriceOnDefault")
@stock_price_on_default.setter
def stock_price_on_default(self, value):
    self._set_parameter("stockPriceOnDefault", value)
@property
def strip_yield_percent(self):
    """
    Strip Yield (expressed in percent) to override; it will be used as pricing
    analysis input. Optional. No override is applied by default. Note that only
    one pricing analysis input should be defined.
    :return: float
    """
    return self._get_parameter("stripYieldPercent")
@strip_yield_percent.setter
def strip_yield_percent(self, value):
    self._set_parameter("stripYieldPercent", value)
@property
def swap_benchmark_curve_yield_percent(self):
    """
    Yield of the swap benchmark to override; it will be used to compute the swap
    spread. Optional. No override is applied by default and the yield is computed
    or retrieved from market data.
    :return: float
    """
    return self._get_parameter("swapBenchmarkCurveYieldPercent")
@swap_benchmark_curve_yield_percent.setter
def swap_benchmark_curve_yield_percent(self, value):
    self._set_parameter("swapBenchmarkCurveYieldPercent", value)
@property
def swap_spread_bp(self):
    """
    Spread of the swap benchmark to override; it will be used as pricing
    analysis input to compute the bond price. Optional. No override is applied by
    default. Note that only one pricing analysis input should be defined.
    :return: float
    """
    return self._get_parameter("swapSpreadBp")
@swap_spread_bp.setter
def swap_spread_bp(self, value):
    self._set_parameter("swapSpreadBp", value)
@property
def tax_on_capital_gain_percent(self):
    """
    Tax rate on capital gain, expressed in percent. Optional. By default no tax
    is applied, i.e. the value is equal to 0.
    :return: float
    """
    return self._get_parameter("taxOnCapitalGainPercent")
@tax_on_capital_gain_percent.setter
def tax_on_capital_gain_percent(self, value):
    self._set_parameter("taxOnCapitalGainPercent", value)
@property
def tax_on_coupon_percent(self):
    """
    Tax rate on coupon, expressed in percent. Optional. By default no tax is
    applied, i.e. the value is equal to 0.
    :return: float
    """
    return self._get_parameter("taxOnCouponPercent")
@tax_on_coupon_percent.setter
def tax_on_coupon_percent(self, value):
    self._set_parameter("taxOnCouponPercent", value)
@property
def tax_on_price_percent(self):
    """
    Tax rate on price, expressed in percent. Optional. By default no tax is
    applied, i.e. the value is equal to 0.
    :return: float
    """
    return self._get_parameter("taxOnPricePercent")
@tax_on_price_percent.setter
def tax_on_price_percent(self, value):
    self._set_parameter("taxOnPricePercent", value)
@property
def tax_on_yield_percent(self):
    """
    Tax rate on yield, expressed in percent. Also named "Tax on Yield".
    Optional. By default no tax is applied, i.e. the value is equal to 0.
    :return: float
    """
    return self._get_parameter("taxOnYieldPercent")
@tax_on_yield_percent.setter
def tax_on_yield_percent(self, value):
    self._set_parameter("taxOnYieldPercent", value)
@property
def trade_date(self):
    """
    Trade date of the bond. The trade_date is used to compute the default
    valuation_date: by default the rule is that valuation_date = trade_date +
    settlement_convention. Optional. By default, it is equal to market_data_date.
    :return: str
    """
    return self._get_parameter("tradeDate")
@trade_date.setter
def trade_date(self, value):
    # Dates go through the datetime setter so str/date/datetime inputs are normalized.
    self._set_datetime_parameter("tradeDate", value)
@property
def use_settlement_date_from_quote(self):
    """
    Specify whether to use the settlement date of the quote or the one computed
    from the market data date.
    :return: bool
    """
    return self._get_parameter("useSettlementDateFromQuote")
@use_settlement_date_from_quote.setter
def use_settlement_date_from_quote(self, value):
    self._set_parameter("useSettlementDateFromQuote", value)
@property
def user_defined_benchmark_price(self):
    """
    Price of the user defined instrument to override; it will be used to compute
    the user defined spread. Optional. No override is applied by default and the
    price is computed or retrieved from market data.
    :return: float
    """
    return self._get_parameter("userDefinedBenchmarkPrice")
@user_defined_benchmark_price.setter
def user_defined_benchmark_price(self, value):
    self._set_parameter("userDefinedBenchmarkPrice", value)
@property
def user_defined_benchmark_yield_percent(self):
    """
    Yield of the user defined instrument to override; it will be used to compute
    the user defined spread. Optional. No override is applied by default and the
    yield is computed or retrieved from market data.
    :return: float
    """
    return self._get_parameter("userDefinedBenchmarkYieldPercent")
@user_defined_benchmark_yield_percent.setter
def user_defined_benchmark_yield_percent(self, value):
    self._set_parameter("userDefinedBenchmarkYieldPercent", value)
@property
def user_defined_spread_bp(self):
    """
    Spread of the user defined instrument to override; it will be used as
    pricing analysis input to compute the bond price. Optional. No override is
    applied by default. Note that only one pricing analysis input should be
    defined.
    :return: float
    """
    return self._get_parameter("userDefinedSpreadBp")
@user_defined_spread_bp.setter
def user_defined_spread_bp(self, value):
    self._set_parameter("userDefinedSpreadBp", value)
@property
def valuation_date(self):
    """
    The valuation date for pricing. Optional. If not set, the valuation date is
    equal to market_data_date or Today. For assets that contain a
    settlement_convention, the default valuation date is equal to the settlement
    date of the asset, that is usually trade_date + settlement_convention.
    :return: str
    """
    return self._get_parameter("valuationDate")
@valuation_date.setter
def valuation_date(self, value):
    # Dates go through the datetime setter so str/date/datetime inputs are normalized.
    self._set_datetime_parameter("valuationDate", value)
@property
def yield_percent(self):
    """
    Yield (expressed in percent) to override; it will be used as pricing
    analysis input. Optional. No override is applied by default. Note that only
    one pricing analysis input should be defined.
    :return: float
    """
    return self._get_parameter("yieldPercent")
@yield_percent.setter
def yield_percent(self, value):
    self._set_parameter("yieldPercent", value)
@property
def z_spread_bp(self):
    """
    Z-Spread to override; it will be used as pricing analysis input to compute
    the bond price. Optional. No override is applied by default. Note that only
    one pricing analysis input should be defined.
    :return: float
    """
    return self._get_parameter("zSpreadBp")
@z_spread_bp.setter
def z_spread_bp(self, value):
    self._set_parameter("zSpreadBp", value)
from typing import Optional, Union
from ....._types import OptDateTime
from ..._enums import (
DateRollingConvention,
DayCountBasis,
InterestType,
StubRule,
Frequency,
AdjustInterestToPaymentDate,
IndexCompoundingMethod,
BusinessDayConvention,
Direction,
IndexAverageMethod,
)
from ..._models import AmortizationItem
from .._instrument_definition import InstrumentDefinition
from ..._enums import IndexObservationMethod
class BondInstrumentDefinition(InstrumentDefinition):
def __init__(
    self,
    instrument_tag: Optional[str] = None,
    instrument_code: Optional[str] = None,
    end_date: "OptDateTime" = None,
    direction: Union[Direction, str] = None,
    interest_type: Union[InterestType, str] = None,
    notional_ccy: Optional[str] = None,
    notional_amount: Optional[float] = None,
    fixed_rate_percent: Optional[float] = None,
    spread_bp: Optional[float] = None,
    interest_payment_frequency: Union[Frequency, str] = None,
    interest_calculation_method: Union[DayCountBasis, str] = None,
    accrued_calculation_method: Union[DayCountBasis, str] = None,
    payment_business_day_convention: Union[BusinessDayConvention, str] = None,
    payment_roll_convention: Union[DateRollingConvention, str] = None,
    index_reset_frequency: Union[Frequency, str] = None,
    index_fixing_lag: Optional[int] = None,
    first_regular_payment_date: "OptDateTime" = None,
    last_regular_payment_date: "OptDateTime" = None,
    amortization_schedule: Optional[AmortizationItem] = None,
    payment_business_days: Optional[str] = None,
    adjust_interest_to_payment_date: Union[AdjustInterestToPaymentDate, str] = None,
    index_compounding_method: Union[IndexCompoundingMethod, str] = None,
    interest_payment_delay: Optional[int] = None,
    stub_rule: Union[StubRule, str] = None,
    issue_date: "OptDateTime" = None,
    index_average_method: Union[IndexAverageMethod, str] = None,
    first_accrual_date: "OptDateTime" = None,
    floor_strike_percent: Optional[float] = None,
    index_fixing_ric: Optional[str] = None,
    is_perpetual: Optional[bool] = None,
    template: Optional[str] = None,
    index_observation_method: Union[IndexObservationMethod, str] = None,
    fixed_rate_percent_schedule: Optional[dict] = None,
    instrument_type: Optional[str] = "Bond",
) -> None:
    """
    Definition of a bond instrument (fixed or floating rate leg).

    Every argument is forwarded unchanged to the corresponding property
    setter, which serializes it into the request payload. Arguments left as
    None are simply omitted, so values fall back to the bond reference data
    (when instrument_code is set) or to service defaults. See the individual
    property docstrings for the meaning and defaults of each field.
    """
    super().__init__()
    self.instrument_tag = instrument_tag
    self.instrument_code = instrument_code
    self.end_date = end_date
    self.direction = direction
    self.interest_type = interest_type
    self.notional_ccy = notional_ccy
    self.notional_amount = notional_amount
    self.fixed_rate_percent = fixed_rate_percent
    self.spread_bp = spread_bp
    self.interest_payment_frequency = interest_payment_frequency
    self.interest_calculation_method = interest_calculation_method
    self.accrued_calculation_method = accrued_calculation_method
    self.payment_business_day_convention = payment_business_day_convention
    self.payment_roll_convention = payment_roll_convention
    self.index_reset_frequency = index_reset_frequency
    self.index_fixing_lag = index_fixing_lag
    self.first_regular_payment_date = first_regular_payment_date
    self.last_regular_payment_date = last_regular_payment_date
    # NOTE(review): the property setter stores this via _set_list_parameter,
    # so a list of AmortizationItem is also accepted — the annotation above
    # looks too narrow; confirm before tightening it.
    self.amortization_schedule = amortization_schedule
    self.payment_business_days = payment_business_days
    self.adjust_interest_to_payment_date = adjust_interest_to_payment_date
    self.index_compounding_method = index_compounding_method
    self.interest_payment_delay = interest_payment_delay
    self.stub_rule = stub_rule
    self.issue_date = issue_date
    self.index_average_method = index_average_method
    self.first_accrual_date = first_accrual_date
    self.floor_strike_percent = floor_strike_percent
    self.index_fixing_ric = index_fixing_ric
    self.is_perpetual = is_perpetual
    self.template = template
    self.index_observation_method = index_observation_method
    self.fixed_rate_percent_schedule = fixed_rate_percent_schedule
    # Stored privately; exposed read-only via get_instrument_type().
    self._instrument_type = instrument_type
def get_instrument_type(self):
    """Return the instrument type string used in the request payload ("Bond" by default)."""
    instrument_type = self._instrument_type
    return instrument_type
@property
def accrued_calculation_method(self):
    """
    The Day Count Basis method used to calculate the accrued interest payments.
    Optional. By default, the same value as InterestCalculationMethod is used.
    :return: enum DayCountBasis
    """
    return self._get_enum_parameter(DayCountBasis, "accruedCalculationMethod")
@accrued_calculation_method.setter
def accrued_calculation_method(self, value):
    self._set_enum_parameter(DayCountBasis, "accruedCalculationMethod", value)
@property
def adjust_interest_to_payment_date(self):
    """
    A flag that indicates whether the coupon dates are adjusted to the payment
    dates. Optional. By default 'false' is used.
    :return: enum AdjustInterestToPaymentDate
    """
    return self._get_enum_parameter(AdjustInterestToPaymentDate, "adjustInterestToPaymentDate")
@adjust_interest_to_payment_date.setter
def adjust_interest_to_payment_date(self, value):
    self._set_enum_parameter(AdjustInterestToPaymentDate, "adjustInterestToPaymentDate", value)
@property
def amortization_schedule(self):
    """
    Definition of amortizations. Stored and returned as a list.
    :return: list AmortizationItem
    """
    return self._get_list_parameter(AmortizationItem, "amortizationSchedule")
@amortization_schedule.setter
def amortization_schedule(self, value):
    self._set_list_parameter(AmortizationItem, "amortizationSchedule", value)
@property
def direction(self):
    """
    The direction of the leg. The possible values are:
    - 'Paid' (the cash flows of the leg are paid to the counterparty),
    - 'Received' (the cash flows of the leg are received from the counterparty).
    Optional for a single leg instrument (like a bond), in which case the default
    value is Received. It is mandatory for a multi-instrument leg instrument
    (like a Swap or CDS leg).
    :return: enum Direction
    """
    return self._get_enum_parameter(Direction, "direction")
@direction.setter
def direction(self, value):
    self._set_enum_parameter(Direction, "direction", value)
@property
def index_average_method(self):
    """
    The method used to average the index fixings over the coupon period.
    NOTE(review): not documented upstream — description inferred from the enum
    name; confirm against the IndexAverageMethod values.
    :return: enum IndexAverageMethod
    """
    return self._get_enum_parameter(IndexAverageMethod, "indexAverageMethod")
@index_average_method.setter
def index_average_method(self, value):
    self._set_enum_parameter(IndexAverageMethod, "indexAverageMethod", value)
@property
def index_compounding_method(self):
    """
    A flag that defines how the coupon rate is calculated from the reset floating
    rates when the reset frequency is higher than the interest payment frequency
    (e.g. daily index reset with quarterly interest payment). The possible values
    are:
    - Compounded (uses the compounded average rate from multiple fixings),
    - Average (uses the arithmetic average rate from multiple fixings),
    - Constant (uses the last published rate among multiple fixings),
    - AdjustedCompounded (uses Chinese 7-day repo fixing),
    - MexicanCompounded (uses Mexican Bremse fixing). Optional. By default
    'Constant' is used.
    :return: enum IndexCompoundingMethod
    """
    return self._get_enum_parameter(IndexCompoundingMethod, "indexCompoundingMethod")
@index_compounding_method.setter
def index_compounding_method(self, value):
    self._set_enum_parameter(IndexCompoundingMethod, "indexCompoundingMethod", value)
@property
def index_reset_frequency(self):
    """
    The reset frequency in case the leg type is Float. Optional. By default the
    IndexTenor is used.
    :return: enum Frequency
    """
    return self._get_enum_parameter(Frequency, "indexResetFrequency")
@index_reset_frequency.setter
def index_reset_frequency(self, value):
    self._set_enum_parameter(Frequency, "indexResetFrequency", value)
@property
def interest_calculation_method(self):
    """
    The Day Count Basis method used to calculate the coupon interest payments.
    Mandatory.
    :return: enum DayCountBasis
    """
    return self._get_enum_parameter(DayCountBasis, "interestCalculationMethod")
@interest_calculation_method.setter
def interest_calculation_method(self, value):
    self._set_enum_parameter(DayCountBasis, "interestCalculationMethod", value)
@property
def interest_payment_frequency(self):
    """
    The frequency of the interest payments. Optional if an instrument code/style
    has been defined: in that case, the value comes from reference data.
    Otherwise, it is mandatory.
    :return: enum Frequency
    """
    return self._get_enum_parameter(Frequency, "interestPaymentFrequency")
@interest_payment_frequency.setter
def interest_payment_frequency(self, value):
    self._set_enum_parameter(Frequency, "interestPaymentFrequency", value)
@property
def interest_type(self):
    """
    A flag that indicates whether the leg is fixed or float. Possible values are:
    - 'Fixed' (the leg has a fixed coupon),
    - 'Float' (the leg has a floating rate index). Mandatory.
    :return: enum InterestType
    """
    return self._get_enum_parameter(InterestType, "interestType")
@interest_type.setter
def interest_type(self, value):
    self._set_enum_parameter(InterestType, "interestType", value)
@property
def payment_business_day_convention(self):
    """
    The method to adjust dates to a working day. The possible values are:
    - ModifiedFollowing (adjusts dates according to the Modified Following
      convention - next business day unless it goes into the next month, in
      which case preceding is used),
    - NextBusinessDay (adjusts dates according to the Following convention - Next
      Business Day),
    - PreviousBusinessDay (adjusts dates according to the Preceding convention -
      Previous Business Day),
    - NoMoving (does not adjust dates),
    - BbswModifiedFollowing (adjusts dates according to the BBSW Modified
      Following convention). Optional. In case an instrument code/style has been
      defined, the value comes from bond reference data. Otherwise
      'ModifiedFollowing' is used.
    :return: enum BusinessDayConvention
    """
    return self._get_enum_parameter(BusinessDayConvention, "paymentBusinessDayConvention")
@payment_business_day_convention.setter
def payment_business_day_convention(self, value):
    self._set_enum_parameter(BusinessDayConvention, "paymentBusinessDayConvention", value)
@property
def payment_roll_convention(self):
    """
    Method to adjust payment dates when they fall at the end of the month (28th
    of February, 30th, 31st). The possible values are:
    - Last (for setting the calculated date to the last working day),
    - Same (for setting the calculated date to the same day; in this latter case,
      the date may be moved according to the date moving convention if it is a
      non-working day),
    - Last28 (for setting the calculated date to the last working day, 28FEB
      being always considered as the last working day),
    - Same28 (for setting the calculated date to the same day, 28FEB being always
      considered as the last working day). Optional. In case an instrument code
      has been defined, the value comes from bond reference data. Otherwise,
      'SameDay' is used.
    :return: enum DateRollingConvention
    """
    return self._get_enum_parameter(DateRollingConvention, "paymentRollConvention")
@payment_roll_convention.setter
def payment_roll_convention(self, value):
    self._set_enum_parameter(DateRollingConvention, "paymentRollConvention", value)
@property
def stub_rule(self):
    """
    The rule that defines whether coupon roll dates are aligned on the maturity
    or the issue date. The possible values are:
    - ShortFirstProRata (to create a short period between the start date and the
      first coupon date, and pay a smaller amount of interest for the short
      period. All coupon dates are calculated backward from the maturity date),
    - ShortFirstFull (to create a short period between the start date and the
      first coupon date, and pay a regular coupon on the first coupon date. All
      coupon dates are calculated backward from the maturity date),
    - LongFirstFull (to create a long period between the start date and the
      second coupon date, and pay a regular coupon on the second coupon date. All
      coupon dates are calculated backward from the maturity date),
    - ShortLastProRata (to create a short period between the last payment date
      and maturity, and pay a smaller amount of interest for the short period.
      All coupon dates are calculated forward from the start date). This property
      may also be used in conjunction with firstRegularPaymentDate and
      lastRegularPaymentDate; in that case the following values can be defined:
    - Issue (all dates are aligned on the issue date),
    - Maturity (all dates are aligned on the maturity date). Optional. By default
      'Maturity' is used.
    :return: enum StubRule
    """
    return self._get_enum_parameter(StubRule, "stubRule")
@stub_rule.setter
def stub_rule(self, value):
    self._set_enum_parameter(StubRule, "stubRule", value)
@property
def end_date(self):
    """
    Maturity date of the bond to override. Mandatory if instrument code has not
    been defined and the is_perpetual flag has been set to false. In case an
    instrument code has been defined, the value comes from bond reference data.
    :return: str
    """
    return self._get_parameter("endDate")
@end_date.setter
def end_date(self, value):
    # Dates go through the datetime setter so str/date/datetime inputs are normalized.
    self._set_datetime_parameter("endDate", value)
@property
def first_accrual_date(self):
    """
    Date at which the bond starts accruing. Optional. In case an instrument code
    has been defined, the value comes from bond reference data. Otherwise the
    default value is the issue date of the bond.
    :return: str
    """
    return self._get_parameter("firstAccrualDate")
@first_accrual_date.setter
def first_accrual_date(self, value):
    # Dates go through the datetime setter so str/date/datetime inputs are normalized.
    self._set_datetime_parameter("firstAccrualDate", value)
@property
def first_regular_payment_date(self):
    """
    The first regular coupon payment date for a leg with an odd first coupon.
    Optional.
    :return: str
    """
    return self._get_parameter("firstRegularPaymentDate")
@first_regular_payment_date.setter
def first_regular_payment_date(self, value):
    # Dates go through the datetime setter so str/date/datetime inputs are normalized.
    self._set_datetime_parameter("firstRegularPaymentDate", value)
    @property
    def fixed_rate_percent(self):
        """
        The fixed coupon rate in percentage. It is mandatory in case of a single leg
        instrument. Otherwise, in case of multi leg instrument, it can be computed as
        the Par rate.
        :return: float
        """
        return self._get_parameter("fixedRatePercent")
    @fixed_rate_percent.setter
    def fixed_rate_percent(self, value):
        # value: float (coupon rate expressed in percent).
        self._set_parameter("fixedRatePercent", value)
    @property
    def floor_strike_percent(self):
        """
        The contractual strike rate of the floor. The value is expressed in percentages.
        If this parameter is set, the floor will apply to the leg with the same
        parameters set in the swapLegDefinition (e.g. maturity, frequency, index,
        discounting rule). No default value applies.
        :return: float
        """
        return self._get_parameter("floorStrikePercent")
    @floor_strike_percent.setter
    def floor_strike_percent(self, value):
        # value: float (strike rate expressed in percent).
        self._set_parameter("floorStrikePercent", value)
    @property
    def index_fixing_lag(self):
        """
        Defines the number of working days between the fixing date and the start of the
        coupon period ('InAdvance') or the end of the coupon period ('InArrears').
        Optional. By default 0 is used.
        :return: int
        """
        return self._get_parameter("indexFixingLag")
    @index_fixing_lag.setter
    def index_fixing_lag(self, value):
        # value: int (number of working days).
        self._set_parameter("indexFixingLag", value)
    @property
    def index_fixing_ric(self):
        """
        The RIC that carries the fixing value. This value overrides the RIC associated
        by default with the IndexName and IndexTenor. Optional.
        :return: str
        """
        return self._get_parameter("indexFixingRic")
    @index_fixing_ric.setter
    def index_fixing_ric(self, value):
        # value: str (a RIC).
        self._set_parameter("indexFixingRic", value)
    @property
    def instrument_code(self):
        """
        Code to define the bond instrument. It can be an ISIN, a RIC, a CUSIP or an
        AssetId.
        :return: str
        """
        return self._get_parameter("instrumentCode")
    @instrument_code.setter
    def instrument_code(self, value):
        # value: str (ISIN, RIC, CUSIP or AssetId).
        self._set_parameter("instrumentCode", value)
    @property
    def instrument_tag(self):
        """
        User defined string to identify the instrument. It can be used to link output
        results to the instrument definition. Only alphabetic, numeric and '- _.#=@'
        characters are supported. Optional.
        :return: str
        """
        return self._get_parameter("instrumentTag")
    @instrument_tag.setter
    def instrument_tag(self, value):
        # value: str (free-form tag echoed back in the results).
        self._set_parameter("instrumentTag", value)
    @property
    def interest_payment_delay(self):
        """
        The number of working days between the end of coupon period and the actual
        interest payment date. Optional. By default no delay (0) is applied.
        :return: int
        """
        return self._get_parameter("interestPaymentDelay")
    @interest_payment_delay.setter
    def interest_payment_delay(self, value):
        # value: int (number of working days).
        self._set_parameter("interestPaymentDelay", value)
    @property
    def is_perpetual(self):
        """
        Flag that defines whether the bond is perpetual or not in case of user defined
        bond. Optional. In case an instrument code has been defined, value comes from
        bond reference data. In case of user defined bond, default value is 'false'.
        :return: bool
        """
        return self._get_parameter("isPerpetual")
    @is_perpetual.setter
    def is_perpetual(self, value):
        # value: bool.
        self._set_parameter("isPerpetual", value)
    @property
    def issue_date(self):
        """
        Date of issuance of the bond to override. Mandatory if instrument code has not
        been defined. In case an instrument code has been defined, value comes from bond
        reference data.
        :return: str
        """
        return self._get_parameter("issueDate")
    @issue_date.setter
    def issue_date(self, value):
        # Accepts str, date, datetime or timedelta.
        self._set_datetime_parameter("issueDate", value)
    @property
    def last_regular_payment_date(self):
        """
        The last regular coupon payment date for leg with an odd last coupon. Optional.
        :return: str
        """
        return self._get_parameter("lastRegularPaymentDate")
    @last_regular_payment_date.setter
    def last_regular_payment_date(self, value):
        # Accepts str, date, datetime or timedelta.
        self._set_datetime_parameter("lastRegularPaymentDate", value)
    @property
    def notional_amount(self):
        """
        The notional amount of the leg at the period start date. Optional. By default
        1,000,000 is used.
        :return: float
        """
        return self._get_parameter("notionalAmount")
    @notional_amount.setter
    def notional_amount(self, value):
        # value: float.
        self._set_parameter("notionalAmount", value)
    @property
    def notional_ccy(self):
        """
        The ISO code of the notional currency. Mandatory if instrument code or
        instrument style has not been defined. In case an instrument code/style has been
        defined, value may come from the reference data.
        :return: str
        """
        return self._get_parameter("notionalCcy")
    @notional_ccy.setter
    def notional_ccy(self, value):
        # value: str (ISO currency code, e.g. "USD").
        self._set_parameter("notionalCcy", value)
    @property
    def payment_business_days(self):
        """
        A list of comma-separated calendar codes to adjust dates (e.g. 'EMU' or 'USA').
        Optional. By default the calendar associated to notional_ccy is used.
        :return: str
        """
        return self._get_parameter("paymentBusinessDays")
    @payment_business_days.setter
    def payment_business_days(self, value):
        # value: str (comma-separated calendar codes).
        self._set_parameter("paymentBusinessDays", value)
    @property
    def spread_bp(self):
        """
        The spread in basis point that is added to the floating rate index value.
        Optional. By default 0 is used.
        :return: float
        """
        return self._get_parameter("spreadBp")
    @spread_bp.setter
    def spread_bp(self, value):
        # value: float (spread in basis points).
        self._set_parameter("spreadBp", value)
    @property
    def template(self):
        """
        A reference to an Adfin instrument contract or the Adfin detailed contract.
        Optional. Either instrument_code, template, or full definition must be provided.
        :return: str
        """
        return self._get_parameter("template")
    @template.setter
    def template(self, value):
        # value: str (Adfin contract reference).
        self._set_parameter("template", value)
    @property
    def index_observation_method(self):
        """
        (RFR) Method for determining the accrual observation period. Optional.
        :return: enum IndexObservationMethod
        """
        return self._get_enum_parameter(IndexObservationMethod, "indexObservationMethod")
    @index_observation_method.setter
    def index_observation_method(self, value):
        # Accepts an IndexObservationMethod member or its string name.
        self._set_enum_parameter(IndexObservationMethod, "indexObservationMethod", value)
    @property
    def fixed_rate_percent_schedule(self):
        """
        The step structure: a list of pre-determined future coupon rates indexed by
        their dates. Either fixedRatePercent or fixedRatePercentSchedule is used.
        No default value applies.
        :return: dict
        """
        return self._get_parameter("fixedRatePercentSchedule")
    @fixed_rate_percent_schedule.setter
    def fixed_rate_percent_schedule(self, value):
        # value: dict mapping dates to coupon rates (in percent).
        self._set_parameter("fixedRatePercentSchedule", value)
    @property
    def instrument_type(self):
        """
        The instrument type assigned to this definition (read-only).
        :return: str
        """
        return self._instrument_type
from typing import Optional, Union, TYPE_CHECKING
from ._bond_definition import BondInstrumentDefinition
from ._bond_pricing_parameters import PricingParameters
from ..._enums import (
AdjustInterestToPaymentDate,
BusinessDayConvention,
DayCountBasis,
Direction,
Frequency,
IndexCompoundingMethod,
InterestType,
DateRollingConvention,
StubRule,
IndexAverageMethod,
)
from ..._models import AmortizationItem
from .._base_definition import BaseDefinition
from .._contracts_data_provider import bond_instrument_code_arg_parser
from ..._enums import IndexObservationMethod
from ....._tools import validate_types, try_copy_to_list
if TYPE_CHECKING:
from ....._types import ExtendedParams, OptStrStrs, OptDateTime
class Definition(BaseDefinition):
    """
    API endpoint for Financial Contract analytics,
    that returns calculations relevant to each contract type.
    Parameters
    ----------
    instrument_code : str, optional
        Code to define the bond instrument.
    instrument_tag : str, optional
        User defined string to identify the instrument. It can be used to link output
        results to the instrument definition. Only alphabetic, numeric and '- _.#=@'
        characters are supported.
    end_date : str or date or datetime or timedelta, optional
        Maturity date of the bond to override. Mandatory if instrument code has not been
        defined and is_perpetual flag has been set to false. In case an instrument code
        has been defined, value comes from bond reference data.
    direction : Direction or str, optional
        The direction of the leg. Optional for a single leg instrument (like a bond),
        in that case default value is Received. It is mandatory for a multi-instrument
        leg instrument (like Swap or CDS leg).
    interest_type : InterestType or str, optional
        A flag that indicates whether the leg is fixed or float.
    notional_ccy : str, optional
        The ISO code of the notional currency. Mandatory if instrument code or
        instrument style has not been defined. In case an instrument code/style has been
        defined, value may come from the reference data.
    notional_amount : float, optional
        The notional amount of the leg at the period start date.
        By default 1,000,000 is used.
    fixed_rate_percent : float, optional
        The fixed coupon rate in percentage. It is mandatory in case of a single leg
        instrument. Otherwise, in case of multi leg instrument, it can be computed as
        the Par rate.
    spread_bp : float, optional
        The spread in basis point that is added to the floating rate index value.
        By default 0 is used.
    interest_payment_frequency : Frequency or str, optional
        The frequency of the interest payments. Optional if an instrument code/style
        have been defined : in that case, value comes from reference data. Otherwise, it
        is mandatory.
    interest_calculation_method : DayCountBasis or str, optional
        The Day Count Basis method used to calculate the coupon interest payments.
    accrued_calculation_method : DayCountBasis or str, optional
        The Day Count Basis method used to calculate the accrued interest payments.
        By default, the same value than interest_calculation_method is used.
    payment_business_day_convention : BusinessDayConvention or str, optional
        The method to adjust dates to a working day.
        In case an instrument code/style has been defined, value comes from bond
        reference data. Otherwise 'ModifiedFollowing' is used.
    payment_roll_convention : DateRollingConvention or str, optional
        Method to adjust payment dates when they fall at the end of the month (28th of
        February, 30th, 31st). In case an instrument code has been defined,
        value comes from bond reference data. Otherwise, 'SameDay' is used.
    index_reset_frequency : Frequency or str, optional
        The reset frequency in case the leg Type is Float.
        By default, the IndexTenor is used.
    index_fixing_lag : int, optional
        Defines the number of working days between the fixing date and the start of the
        coupon period ('InAdvance') or the end of the coupon period ('InArrears').
        By default 0 is used.
    first_regular_payment_date : str or date or datetime or timedelta, optional
        The first regular coupon payment date for leg with an odd first coupon.
    last_regular_payment_date : str or date or datetime or timedelta, optional
        The last regular coupon payment date for leg with an odd last coupon.
    amortization_schedule : AmortizationItem, optional
        Definition of amortizations.
    payment_business_days : str, optional
        A list of comma-separated calendar codes to adjust dates (e.g. 'EMU' or 'USA').
        By default the calendar associated to notional_ccy is used.
    adjust_interest_to_payment_date : AdjustInterestToPaymentDate or str, optional
        A flag that indicates if the coupon dates are adjusted to the payment dates.
        By default, 'false' is used.
    index_compounding_method : IndexCompoundingMethod or str, optional
        A flag that defines how the coupon rate is calculated from the reset floating
        rates when the reset frequency is higher than the interest payment frequency
        (e.g. daily index reset with quarterly interest payment).
        By default 'Constant' is used.
    interest_payment_delay : int, optional
        The number of working days between the end of coupon period and the actual
        interest payment date.
        By default, no delay (0) is applied.
    stub_rule : StubRule or str, optional
        The rule that defines whether coupon roll dates are aligned on the maturity or
        the issue date.
        By default, 'Maturity' is used.
    issue_date : str or date or datetime or timedelta, optional
        Date of issuance of the bond to override. Mandatory if instrument code has not
        been defined. In case an instrument code has been defined, value comes from bond
        reference data.
    index_average_method : IndexAverageMethod or str, optional
        The value of the average index calculation method. The possible values are:
        ArithmeticAverage, CompoundedActual, CompoundedAverageRate, DailyCompoundedAverage
    first_accrual_date : str or date or datetime or timedelta, optional
        Date at which bond starts accruing. In case an instrument code has
        been defined, value comes from bond reference data. Otherwise, default value is
        the issue date of the bond.
    floor_strike_percent : float, optional
        The contractual strike rate of the floor. The value is expressed in percentages.
        If this parameter is set, the floor will apply to the leg with the same
        parameters set in the swapLegDefinition (e.g. maturity, frequency, index,
        discounting rule). No default value applies.
    index_fixing_ric : str, optional
        The RIC that carries the fixing value. This value overrides the RIC associated
        by default with the IndexName and IndexTenor.
    is_perpetual : bool, optional
        Flag that defines whether the bond is perpetual or not in case of user defined
        bond. In case an instrument code has been defined, value comes from
        bond reference data. In case of user defined bond, default value is 'false'.
    template : str, optional
        A reference to an Adfin instrument contract or the Adfin detailed contract.
        Either instrument_code, template, or full definition must be provided.
    fields: list of str, optional
        Contains the list of Analytics that the quantitative analytic service will
        compute.
    pricing_parameters : PricingParameters, optional
        The pricing parameters to apply to this instrument. If pricing
        parameters are not provided at this level parameters defined globally at the
        request level are used. If no pricing parameters are provided globally default
        values apply.
    extended_params : dict, optional
        If necessary other parameters.
    index_observation_method : IndexObservationMethod or str, optional
        (RFR) Method for determining the accrual observation period.
    fixed_rate_percent_schedule : dict, optional
        The step structure: a list of pre-determined future coupon rates indexed by their dates.
        Either fixedRatePercent or fixedRatePercentSchedule is used. No default value applies.
    instrument_type : str, optional
        Instrument type definition for bond.
        Optional. Definition can be provided. Otherwise, default value is "Bond".
    Methods
    -------
    get_data(session=session, on_response=on_response)
        Returns a response to the data platform
    get_data_async(session=None, on_response=None, async_mode=None)
        Returns a response asynchronously to the data platform
    get_stream(session=session)
        Get stream quantitative analytic service subscription
    Examples
    --------
    >>> import refinitiv.data.content.ipa.financial_contracts as rdf
    >>> definition = rdf.bond.Definition(
    ...    issue_date = "2002-02-28",
    ...    end_date = "2032-02-28",
    ...    notional_ccy = "USD",
    ...    interest_payment_frequency = "Annual",
    ...    fixed_rate_percent = 7,
    ...    interest_calculation_method = rdf.bond.DayCountBasis.DCB_ACTUAL_ACTUAL
    ... )
    >>> response = definition.get_data()
    >>> import refinitiv.data.content.ipa.financial_contracts as rdf
    >>> definition = rdf.bond.Definition(
    ...     fields = ["YieldPercent", "Duration", "RedemptionDateType", "RedemptionDate"],
    ...     instrument_type = "Bond",
    ...     instrument_code="250847EF3=TWBL",
    ...     pricing_parameters=rdf.bond.PricingParameters(
    ...         redemption_date_type="RedemptionAtCallDate",
    ...         price = 100,
    ...         trade_date = "2020-05-01"
    ...     )
    ... )
    >>> response = definition.get_data()
    Using get_data_async
    >>> import asyncio
    >>> task = definition.get_data_async()
    >>> response = asyncio.run(task)
    Using get_stream
    >>> response = definition.get_stream()
    """
    def __init__(
        self,
        instrument_code: Optional[str] = None,
        instrument_tag: Optional[str] = None,
        end_date: "OptDateTime" = None,
        direction: Union[Direction, str] = None,
        interest_type: Union[InterestType, str] = None,
        notional_ccy: Optional[str] = None,
        notional_amount: Optional[float] = None,
        fixed_rate_percent: Optional[float] = None,
        spread_bp: Optional[float] = None,
        interest_payment_frequency: Union[Frequency, str] = None,
        interest_calculation_method: Union[DayCountBasis, str] = None,
        accrued_calculation_method: Union[DayCountBasis, str] = None,
        payment_business_day_convention: Union[BusinessDayConvention, str] = None,
        payment_roll_convention: Union[DateRollingConvention, str] = None,
        index_reset_frequency: Union[Frequency, str] = None,
        index_fixing_lag: Optional[int] = None,
        first_regular_payment_date: "OptDateTime" = None,
        last_regular_payment_date: "OptDateTime" = None,
        amortization_schedule: Optional[AmortizationItem] = None,
        payment_business_days: Optional[str] = None,
        adjust_interest_to_payment_date: Union[AdjustInterestToPaymentDate, str] = None,
        index_compounding_method: Union[IndexCompoundingMethod, str] = None,
        interest_payment_delay: Optional[int] = None,
        stub_rule: Union[StubRule, str] = None,
        issue_date: "OptDateTime" = None,
        index_average_method: Union[IndexAverageMethod, str] = None,
        first_accrual_date: "OptDateTime" = None,
        floor_strike_percent: Optional[float] = None,
        index_fixing_ric: Optional[str] = None,
        is_perpetual: Optional[bool] = None,
        template: Optional[str] = None,
        fields: "OptStrStrs" = None,
        pricing_parameters: Optional[PricingParameters] = None,
        extended_params: "ExtendedParams" = None,
        index_observation_method: Union[IndexObservationMethod, str] = None,
        fixed_rate_percent_schedule: Optional[dict] = None,
        instrument_type: Optional[str] = "Bond",
    ) -> None:
        # Normalize the instrument code to a plain string via the shared parser.
        if instrument_code:
            instrument_code = bond_instrument_code_arg_parser.get_str(instrument_code)
        # Fail fast with a clear error if the day-count lags are not integers.
        validate_types(index_fixing_lag, [int, type(None)], "index_fixing_lag")
        validate_types(interest_payment_delay, [int, type(None)], "interest_payment_delay")
        # Defensive copy so later mutation of the caller's list does not affect the request.
        fields = try_copy_to_list(fields)
        # Build the underlying instrument definition and hand it to the base request.
        definition = BondInstrumentDefinition(
            accrued_calculation_method=accrued_calculation_method,
            adjust_interest_to_payment_date=adjust_interest_to_payment_date,
            amortization_schedule=amortization_schedule,
            direction=direction,
            index_average_method=index_average_method,
            index_compounding_method=index_compounding_method,
            index_reset_frequency=index_reset_frequency,
            interest_calculation_method=interest_calculation_method,
            interest_payment_frequency=interest_payment_frequency,
            interest_type=interest_type,
            payment_business_day_convention=payment_business_day_convention,
            payment_roll_convention=payment_roll_convention,
            stub_rule=stub_rule,
            end_date=end_date,
            first_accrual_date=first_accrual_date,
            first_regular_payment_date=first_regular_payment_date,
            fixed_rate_percent=fixed_rate_percent,
            floor_strike_percent=floor_strike_percent,
            index_fixing_lag=index_fixing_lag,
            index_fixing_ric=index_fixing_ric,
            instrument_code=instrument_code,
            instrument_tag=instrument_tag,
            interest_payment_delay=interest_payment_delay,
            is_perpetual=is_perpetual,
            issue_date=issue_date,
            last_regular_payment_date=last_regular_payment_date,
            notional_amount=notional_amount,
            notional_ccy=notional_ccy,
            payment_business_days=payment_business_days,
            spread_bp=spread_bp,
            template=template,
            index_observation_method=index_observation_method,
            fixed_rate_percent_schedule=fixed_rate_percent_schedule,
            instrument_type=instrument_type,
        )
        super().__init__(
            definition=definition,
            fields=fields,
            pricing_parameters=pricing_parameters,
            extended_params=extended_params,
        )
from typing import Optional
from ._repo_underlying_pricing_parameters import UnderlyingPricingParameters
from .._instrument_definition import InstrumentDefinition
from ..._object_definition import ObjectDefinition
class UnderlyingContract(ObjectDefinition):
    """
    API endpoint for Financial Contract analytics,
    that returns calculations relevant to each contract type.
    Parameters
    ----------
    instrument_type : str, optional
        The type of instrument being defined.
    instrument_definition : object, optional
        Definition of the input contract
    pricing_parameters : UnderlyingPricingParameters, optional
        The pricing parameters to apply to this instrument. If pricing parameters are
        not provided at this level parameters defined globally at the request level are
        used. If no pricing parameters are provided globally default values apply.
    Examples
    --------
    >>> import refinitiv.data.content.ipa.financial_contracts as rdf
    >>> rdf.repo.UnderlyingContract(
    ...     instrument_type="Bond",
    ...     instrument_definition=rdf.bond.Definition(instrument_code="US191450264="),
    ...)
    """
    def __init__(
        self,
        instrument_type: Optional[str] = None,
        instrument_definition: Optional[object] = None,
        pricing_parameters: Optional[UnderlyingPricingParameters] = None,
    ) -> None:
        super().__init__()
        self.instrument_type = instrument_type
        self.instrument_definition = instrument_definition
        self.pricing_parameters = pricing_parameters
    @property
    def instrument_definition(self):
        """
        Definition of the input contract.
        :return: object InstrumentDefinition
        """
        return self._get_object_parameter(InstrumentDefinition, "instrumentDefinition")
    @instrument_definition.setter
    def instrument_definition(self, value):
        # value: an InstrumentDefinition (e.g. a bond definition).
        self._set_object_parameter(InstrumentDefinition, "instrumentDefinition", value)
    @property
    def pricing_parameters(self):
        """
        The pricing parameters to apply to this instrument. Optional.
        If pricing parameters are not provided at this level parameters defined globally at the request level are used. If no
        pricing parameters are provided globally default values apply.
        :return: object RepoUnderlyingPricingParameters
        """
        return self._get_object_parameter(UnderlyingPricingParameters, "pricingParameters")
    @pricing_parameters.setter
    def pricing_parameters(self, value):
        # value: UnderlyingPricingParameters instance.
        self._set_object_parameter(UnderlyingPricingParameters, "pricingParameters", value)
    @property
    def instrument_type(self):
        """
        The type of instrument being defined.
        :return: str
        """
        return self._get_parameter("instrumentType")
    @instrument_type.setter
    def instrument_type(self, value):
        # value: str (e.g. "Bond").
        self._set_parameter("instrumentType", value)
from typing import Optional
from .._instrument_definition import ObjectDefinition
class RepoParameters(ObjectDefinition):
    """
    API endpoint for Financial Contract analytics,
    that returns calculations relevant to each contract type.
    Parameters
    ----------
    coupon_paid_at_horizon : bool, optional
        Flag that defines whether coupons paid at horizon. This has no impact on
        pricing.
    haircut_rate_percent : float, optional
        The reduction applied to the value of an underlying asset for purposes of
        calculating a repo collateral. The value is computed as [(initialmarginpercent -
        100) / initialmarginpercent] and expressed in percentages. Either the haircut or
        the initial margin field can be set. Optional. By default it is computed from
        initialmarginpercent.
    initial_margin_percent : float, optional
        The initial market value of collateral expressed as a percentage of the purchase
        price of the underlying asset. Either haircutratepercent or initialmarginpercent
        can be overridden. Optional. Default value is 100.
    purchase_price : float, optional
        Purchase price of the asset. This parameter can be used to solve repurchaseprice
        from this purchaseprice value. Optional. By default it is computed from net
        present value and initial margin.
    repurchase_price : float, optional
        Repurchase price of the asset. This parameter can be used to solve purchaseprice
        from this repurchaseprice value. Optional. By default it is computed from
        underlying end price, or solved from purchaseprice and repo rate.
    """
    def __init__(
        self,
        coupon_paid_at_horizon: Optional[bool] = None,
        haircut_rate_percent: Optional[float] = None,
        initial_margin_percent: Optional[float] = None,
        purchase_price: Optional[float] = None,
        repurchase_price: Optional[float] = None,
    ) -> None:
        super().__init__()
        self.coupon_paid_at_horizon = coupon_paid_at_horizon
        self.haircut_rate_percent = haircut_rate_percent
        self.initial_margin_percent = initial_margin_percent
        self.purchase_price = purchase_price
        self.repurchase_price = repurchase_price
    @property
    def coupon_paid_at_horizon(self):
        """
        Flag that defines whether coupons paid at horizon. This has no impact on
        pricing.
        :return: bool
        """
        return self._get_parameter("couponPaidAtHorizon")
    @coupon_paid_at_horizon.setter
    def coupon_paid_at_horizon(self, value):
        # value: bool.
        self._set_parameter("couponPaidAtHorizon", value)
    @property
    def haircut_rate_percent(self):
        """
        The reduction applied to the value of an underlying asset for purposes of
        calculating a repo collateral. The value is computed as [(initialmarginpercent -
        100) / initialmarginpercent] and expressed in percentages. Either the haircut or
        the initial margin field can be set. Optional. By default it is computed from
        initialmarginpercent.
        :return: float
        """
        return self._get_parameter("haircutRatePercent")
    @haircut_rate_percent.setter
    def haircut_rate_percent(self, value):
        # value: float (percentage).
        self._set_parameter("haircutRatePercent", value)
    @property
    def initial_margin_percent(self):
        """
        The initial market value of collateral expressed as a percentage of the purchase
        price of the underlying asset. Either haircutratepercent or initialmarginpercent
        can be overridden. Optional. Default value is 100.
        :return: float
        """
        return self._get_parameter("initialMarginPercent")
    @initial_margin_percent.setter
    def initial_margin_percent(self, value):
        # value: float (percentage).
        self._set_parameter("initialMarginPercent", value)
    @property
    def purchase_price(self):
        """
        Purchase price of the asset. This parameter can be used to solve repurchaseprice
        from this purchaseprice value. Optional. By default it is computed from net
        present value and initial margin.
        :return: float
        """
        return self._get_parameter("purchasePrice")
    @purchase_price.setter
    def purchase_price(self, value):
        # value: float.
        self._set_parameter("purchasePrice", value)
    @property
    def repurchase_price(self):
        """
        Repurchase price of the asset. This parameter can be used to solve purchaseprice
        from this repurchaseprice value. Optional. By default it is computed from
        underlying end price, or solved from purchaseprice and repo rate.
        :return: float
        """
        return self._get_parameter("repurchasePrice")
    @repurchase_price.setter
    def repurchase_price(self, value):
        # value: float.
        self._set_parameter("repurchasePrice", value)
from typing import Optional, Union
from ....._types import OptDateTime
from .._instrument_definition import InstrumentDefinition
from ..._enums import (
BuySell,
DayCountBasis,
)
from ._repo_underlying_contract import UnderlyingContract
class RepoInstrumentDefinition(InstrumentDefinition):
"""
API endpoint for Financial Contract analytics,
that returns calculations relevant to each contract type.
Parameters
----------
instrument_tag : str, optional
User defined string to identify the instrument.it can be used to link output
results to the instrument definition. only alphabetic, numeric and '- _.#=@'
characters are supported. optional.
start_date : str or date or datetime or timedelta, optional
Start date of the repo, that means when the underlying security is exchanged.
mandatory.
end_date : str or date or datetime or timedelta, optional
End date of the repo, that means when the borrower repurchases the security
back. either enddate or tenor field are requested.
tenor : str, optional
Tenor that defines the duration of the repo in case no enddate has been
provided. in that case, enddate is computed from startdate and tenor. either
enddate or tenor field are requested.
buy_sell : BuySell or str, optional
The indicator of the deal side. the possible values are: buy: buying the repo,
sell: selling the repo. optional. the default value is "buy".
day_count_basis : DayCountBasis or str, optional
Day count basis convention to apply to the custom repo rate. optional,
"dcb_actual_360" by default.
underlying_instruments : RepoUnderlyingContract, optional
Definition of the underlying instruments. only bond contracts are supported for
now, and only one bond can be used. mandatory.
is_coupon_exchanged : bool, optional
Specifies whether or not intermediate coupons are exchanged.
- couponexchanged = true to specify that intermediate coupons for the underlying
bond (between the repo start date and repo end date) are exchanged between the
repo seller and repo buyer.
- couponexchanged = false to specify that no intermediate coupons are exchanged
between the repo seller and repo buyer. in this case the repo instrument is
like a standard loan with no intermediate coupons; the bond is only used as a
warranty in case the money borrower defaults. optional. true by default, which
means coupon exchanged.
repo_rate_percent : float, optional
Custom repo rate in percentage. if not provided in the request, it will be
computed by interpolating/extrapolating a repo curve. optional.
"""
def __init__(
self,
instrument_tag: Optional[str] = None,
start_date: "OptDateTime" = None,
end_date: "OptDateTime" = None,
tenor: Optional[str] = None,
buy_sell: Union[BuySell, str] = None,
day_count_basis: Union[DayCountBasis, str] = None,
underlying_instruments: Optional[UnderlyingContract] = None,
is_coupon_exchanged: Optional[bool] = None,
repo_rate_percent: Optional[float] = None,
) -> None:
super().__init__()
self.instrument_tag = instrument_tag
self.start_date = start_date
self.end_date = end_date
self.tenor = tenor
self.buy_sell = buy_sell
self.day_count_basis = day_count_basis
self.underlying_instruments = underlying_instruments
self.is_coupon_exchanged = is_coupon_exchanged
self.repo_rate_percent = repo_rate_percent
def get_instrument_type(self):
return "Repo"
@property
def buy_sell(self):
"""
The indicator of the deal side. the possible values are: buy: buying the repo,
sell: selling the repo. optional. the default value is "buy".
:return: enum BuySell
"""
return self._get_enum_parameter(BuySell, "buySell")
@buy_sell.setter
def buy_sell(self, value):
self._set_enum_parameter(BuySell, "buySell", value)
@property
def day_count_basis(self):
"""
Day count basis convention to apply to the custom repo rate. optional,
"dcb_actual_360" by default.
:return: enum DayCountBasis
"""
return self._get_enum_parameter(DayCountBasis, "dayCountBasis")
@day_count_basis.setter
def day_count_basis(self, value):
self._set_enum_parameter(DayCountBasis, "dayCountBasis", value)
@property
def underlying_instruments(self):
"""
Definition of the underlying instruments. only bond contracts are supported for
now, and only one bond can be used. mandatory.
:return: list RepoUnderlyingContract
"""
return self._get_list_parameter(UnderlyingContract, "underlyingInstruments")
@underlying_instruments.setter
def underlying_instruments(self, value):
self._set_list_parameter(UnderlyingContract, "underlyingInstruments", value)
@property
def end_date(self):
"""
End date of the repo, that means when the borrower repurchases the security
back. either enddate or tenor field are requested.
:return: str
"""
return self._get_parameter("endDate")
@end_date.setter
def end_date(self, value):
self._set_datetime_parameter("endDate", value)
@property
def instrument_tag(self):
"""
User defined string to identify the instrument.it can be used to link output
results to the instrument definition. only alphabetic, numeric and '- _.#=@'
characters are supported. optional.
:return: str
"""
return self._get_parameter("instrumentTag")
@instrument_tag.setter
def instrument_tag(self, value):
self._set_parameter("instrumentTag", value)
@property
def is_coupon_exchanged(self):
"""
Specifies whether or not intermediate coupons are exchanged.
- couponexchanged = true to specify that intermediate coupons for the underlying
bond (between the repo start date and repo end date) are exchanged between the
repo seller and repo buyer.
- couponexchanged = false to specify that no intermediate coupons are exchanged
between the repo seller and repo buyer. in this case the repo instrument is
like a standard loan with no intermediate coupons; the bond is only used as a
warranty in case the money borrower defaults. optional. true by default, which
means coupon exchanged.
:return: bool
"""
return self._get_parameter("isCouponExchanged")
    @is_coupon_exchanged.setter
    def is_coupon_exchanged(self, value):
        # Boolean flag; see the getter docstring for semantics.
        self._set_parameter("isCouponExchanged", value)
    @property
    def repo_rate_percent(self):
        """
        Custom repo rate, in percent. If not provided in the request, it will
        be computed by interpolating/extrapolating a repo curve. Optional.
        :return: float
        """
        return self._get_parameter("repoRatePercent")
    @repo_rate_percent.setter
    def repo_rate_percent(self, value):
        # Numeric rate expressed in percent (e.g. 1.5 for 1.5%).
        self._set_parameter("repoRatePercent", value)
    @property
    def start_date(self):
        """
        Start date of the repo, i.e. when the underlying security is exchanged.
        Mandatory.
        :return: str
        """
        return self._get_parameter("startDate")
    @start_date.setter
    def start_date(self, value):
        # Accepts str, date, datetime or timedelta (see "OptDateTime").
        self._set_datetime_parameter("startDate", value)
    @property
    def tenor(self):
        """
        Tenor that defines the duration of the repo in case no end_date has
        been provided. In that case, end_date is computed from start_date and
        tenor. Either end_date or tenor is required.
        :return: str
        """
        return self._get_parameter("tenor")
    @tenor.setter
    def tenor(self, value):
        # Tenor code string, e.g. '6M' or '1Y'.
        self._set_parameter("tenor", value)
from typing import Optional
from ....._types import OptDateTime
from ..._object_definition import ObjectDefinition
from ._repo_parameters import RepoParameters
from ..bond import PricingParameters as BondPricingParameters
class UnderlyingPricingParameters(ObjectDefinition):
    """
    Pricing parameters applied to the underlying instrument(s) of a repo
    contract: bond pricing parameters at the repo start/end dates plus
    repo-specific parameters.

    Parameters
    ----------
    pricing_parameters_at_end : BondPricingParameters, optional
        Bond pricing parameters applied at the repo end date.
    pricing_parameters_at_start : BondPricingParameters, optional
        Bond pricing parameters applied at the repo start date.
    repo_parameters : RepoParameters, optional
    market_data_date : str or date or datetime or timedelta, optional
        The date at which the market data is retrieved. The value is expressed
        in ISO 8601 format: YYYY-MM-DDT[hh]:[mm]:[ss]Z (e.g.
        '2021-01-01T00:00:00Z'). It should be less than or equal to
        valuation_date. Optional. By default, market_data_date is
        valuation_date or today.
    report_ccy : str, optional
        The reporting currency code, expressed in ISO 4217 alphabetical format
        (e.g. 'USD'). It is set for the fields ending with 'XxxInReportCcy'.
        Optional. The default value is the notional currency.
    valuation_date : str or date or datetime or timedelta, optional
        The date at which the instrument is valued. The value is expressed in
        ISO 8601 format: YYYY-MM-DDT[hh]:[mm]:[ss]Z (e.g.
        '2021-01-01T00:00:00Z'). By default, market_data_date is used. If
        market_data_date is not specified, the default value is today.
    """
    def __init__(
        self,
        pricing_parameters_at_end: Optional[BondPricingParameters] = None,
        pricing_parameters_at_start: Optional[BondPricingParameters] = None,
        repo_parameters: Optional[RepoParameters] = None,
        market_data_date: "OptDateTime" = None,
        report_ccy: Optional[str] = None,
        valuation_date: "OptDateTime" = None,
    ) -> None:
        super().__init__()
        # Each assignment routes through the matching property setter below,
        # which records the value in the underlying parameter dictionary.
        self.pricing_parameters_at_end = pricing_parameters_at_end
        self.pricing_parameters_at_start = pricing_parameters_at_start
        self.repo_parameters = repo_parameters
        self.market_data_date = market_data_date
        self.report_ccy = report_ccy
        self.valuation_date = valuation_date
    @property
    def pricing_parameters_at_end(self):
        """
        Bond pricing parameters applied at the repo end date.
        :return: object BondPricingParameters
        """
        return self._get_object_parameter(BondPricingParameters, "pricingParametersAtEnd")
    @pricing_parameters_at_end.setter
    def pricing_parameters_at_end(self, value):
        self._set_object_parameter(BondPricingParameters, "pricingParametersAtEnd", value)
    @property
    def pricing_parameters_at_start(self):
        """
        Bond pricing parameters applied at the repo start date.
        :return: object BondPricingParameters
        """
        return self._get_object_parameter(BondPricingParameters, "pricingParametersAtStart")
    @pricing_parameters_at_start.setter
    def pricing_parameters_at_start(self, value):
        self._set_object_parameter(BondPricingParameters, "pricingParametersAtStart", value)
    @property
    def repo_parameters(self):
        """
        Repo-specific parameters applied to the underlying instrument.
        :return: object RepoParameters
        """
        return self._get_object_parameter(RepoParameters, "repoParameters")
    @repo_parameters.setter
    def repo_parameters(self, value):
        self._set_object_parameter(RepoParameters, "repoParameters", value)
    @property
    def market_data_date(self):
        """
        The date at which the market data is retrieved, in ISO 8601 format
        (e.g. '2021-01-01T00:00:00Z'). It should be less than or equal to
        valuation_date. Optional. By default, market_data_date is
        valuation_date or today.
        :return: str
        """
        return self._get_parameter("marketDataDate")
    @market_data_date.setter
    def market_data_date(self, value):
        # Accepts str, date, datetime or timedelta (see "OptDateTime").
        self._set_datetime_parameter("marketDataDate", value)
    @property
    def report_ccy(self):
        """
        The reporting currency code, in ISO 4217 alphabetical format (e.g.
        'USD'). It is set for the fields ending with 'XxxInReportCcy'.
        Optional. The default value is the notional currency.
        :return: str
        """
        return self._get_parameter("reportCcy")
    @report_ccy.setter
    def report_ccy(self, value):
        self._set_parameter("reportCcy", value)
    @property
    def valuation_date(self):
        """
        The date at which the instrument is valued, in ISO 8601 format (e.g.
        '2021-01-01T00:00:00Z'). By default, market_data_date is used. If
        market_data_date is not specified, the default value is today.
        :return: str
        """
        return self._get_parameter("valuationDate")
    @valuation_date.setter
    def valuation_date(self, value):
        # Accepts str, date, datetime or timedelta (see "OptDateTime").
        self._set_datetime_parameter("valuationDate", value)
from typing import Optional, Union
from ....._types import OptDateTime
from ..._enums import RepoCurveType
from ..._object_definition import ObjectDefinition
class PricingParameters(ObjectDefinition):
    """
    Pricing parameters for a repo contract.

    Parameters
    ----------
    repo_curve_type : RepoCurveType or str, optional
        Curve used to compute the repo rate. It can be computed using the
        following methods:
        - RepoCurve: rate is computed by interpolating a repo curve.
        - DepositCurve: rate is computed by interpolating a deposit curve.
        - FixingLibor: rate is computed by interpolating LIBOR rates.
        If no curve can be found, the rate is computed using a deposit curve.
    market_data_date : str or date or datetime or timedelta, optional
        The date at which the market data is retrieved. The value is expressed
        in ISO 8601 format: YYYY-MM-DDT[hh]:[mm]:[ss]Z (e.g.
        '2021-01-01T00:00:00Z'). It should be less than or equal to
        valuation_date. Optional. By default, market_data_date is
        valuation_date or today.
    report_ccy : str, optional
        The reporting currency code, expressed in ISO 4217 alphabetical format
        (e.g. 'USD'). It is set for the fields ending with 'XxxInReportCcy'.
        Optional. The default value is the notional currency.
    valuation_date : str or date or datetime or timedelta, optional
        The date at which the instrument is valued. The value is expressed in
        ISO 8601 format: YYYY-MM-DDT[hh]:[mm]:[ss]Z (e.g.
        '2021-01-01T00:00:00Z'). By default, market_data_date is used. If
        market_data_date is not specified, the default value is today.
    """
    def __init__(
        self,
        repo_curve_type: Union[RepoCurveType, str] = None,
        market_data_date: "OptDateTime" = None,
        report_ccy: Optional[str] = None,
        valuation_date: "OptDateTime" = None,
    ) -> None:
        super().__init__()
        # Each assignment routes through the matching property setter below.
        self.repo_curve_type = repo_curve_type
        self.market_data_date = market_data_date
        self.report_ccy = report_ccy
        self.valuation_date = valuation_date
    @property
    def repo_curve_type(self):
        """
        Curve used to compute the repo rate (RepoCurve, DepositCurve or
        FixingLibor). If no curve can be found, the rate is computed using a
        deposit curve.
        :return: enum RepoCurveType
        """
        return self._get_enum_parameter(RepoCurveType, "repoCurveType")
    @repo_curve_type.setter
    def repo_curve_type(self, value):
        # Accepts a RepoCurveType enum member or its string name.
        self._set_enum_parameter(RepoCurveType, "repoCurveType", value)
    @property
    def market_data_date(self):
        """
        The date at which the market data is retrieved, in ISO 8601 format
        (e.g. '2021-01-01T00:00:00Z'). It should be less than or equal to
        valuation_date. Optional. By default, market_data_date is
        valuation_date or today.
        :return: str
        """
        return self._get_parameter("marketDataDate")
    @market_data_date.setter
    def market_data_date(self, value):
        # Accepts str, date, datetime or timedelta (see "OptDateTime").
        self._set_datetime_parameter("marketDataDate", value)
    @property
    def report_ccy(self):
        """
        The reporting currency code, in ISO 4217 alphabetical format (e.g.
        'USD'). It is set for the fields ending with 'XxxInReportCcy'.
        Optional. The default value is the notional currency.
        :return: str
        """
        return self._get_parameter("reportCcy")
    @report_ccy.setter
    def report_ccy(self, value):
        self._set_parameter("reportCcy", value)
    @property
    def valuation_date(self):
        """
        The date at which the instrument is valued, in ISO 8601 format (e.g.
        '2021-01-01T00:00:00Z'). By default, market_data_date is used. If
        market_data_date is not specified, the default value is today.
        :return: str
        """
        return self._get_parameter("valuationDate")
    @valuation_date.setter
    def valuation_date(self, value):
        # Accepts str, date, datetime or timedelta (see "OptDateTime").
        self._set_datetime_parameter("valuationDate", value)
from typing import Optional, List, TYPE_CHECKING, Union
from ._repo_definition import RepoInstrumentDefinition
from ._repo_pricing_parameters import PricingParameters
from ._repo_underlying_contract import UnderlyingContract
from .._base_definition import BaseDefinition
from ..._enums import DayCountBasis, BuySell
from ....._tools import create_repr, try_copy_to_list
if TYPE_CHECKING:
from ....._types import ExtendedParams, OptStrStrs, OptDateTime
class Definition(BaseDefinition):
    """
    API endpoint for Financial Contract analytics,
    that returns calculations relevant to each contract type.

    Parameters
    ----------
    instrument_tag : str, optional
        User defined string to identify the instrument. It can be used to link output
        results to the instrument definition. Only alphabetic, numeric and '- _.#=@'
        characters are supported.
    start_date : str or date or datetime or timedelta, optional
        Start date of the repo, that means when the underlying security is
        exchanged.
    end_date : str or date or datetime or timedelta, optional
        End date of the repo, that means when the borrower repurchases the security
        back. Either end_date or tenor field are requested.
    tenor : str, optional
        Tenor that defines the duration of the Repo in case no end_date has been
        provided. In that case, end_date is computed from start_date and tenor. Either
        end_date or tenor field are requested.
    buy_sell : BuySell or str, optional
        The indicator of the deal side. Optional. The default value is "buy".
    day_count_basis : DayCountBasis or str, optional
        Day Count Basis convention to apply to the custom Repo rate.
        By default "Dcb_Actual_360".
    underlying_instruments : list of UnderlyingContract
        Definition of the underlying instruments. Only Bond Contracts are supported for
        now, and only one Bond can be used.
    is_coupon_exchanged : bool, optional
        Specifies whether or not intermediate coupons are exchanged.
        - CouponExchanged = True to specify that intermediate coupons for the underlying
          bond (between the repo start date and repo end date) are exchanged between the
          repo seller and repo buyer.
        - CouponExchanged = False to specify that no intermediate coupons are exchanged
          between the repo seller and repo buyer. In this case the repo instrument is
          like a standard loan with no intermediate coupons; the bond is only used as a
          warranty in case the money borrower defaults. True by default, which means
          coupon exchanged.
    repo_rate_percent : float, optional
        Custom Repo Rate in percentage. If not provided in the request, it will be
        computed by interpolating/extrapolating a Repo Curve.
    fields: list of str, optional
        Contains the list of Analytics that the quantitative analytic service will
        compute.
    pricing_parameters : PricingParameters, optional
        The pricing parameters to apply to this instrument. If pricing parameters
        are not provided at this level parameters defined globally at the request
        level are used. If no pricing parameters are provided globally default
        values apply.
    extended_params : dict, optional
        If necessary other parameters

    Methods
    -------
    get_data(session=session, on_response=on_response, async_mode=None)
        Returns a response to the data platform
    get_data_async(session=session, on_response=on_response, async_mode=None)
        Returns a response to the async data platform
    get_stream(session=session)
        Get stream quantitative analytic service subscription

    Examples
    --------
    >>> import refinitiv.data.content.ipa.financial_contracts as rdf
    >>> definition = rdf.repo.Definition(
    ...     underlying_instruments=[
    ...         rdf.repo.UnderlyingContract(
    ...             instrument_type="Bond",
    ...             instrument_definition=rdf.bond.Definition(instrument_code="US191450264="),
    ...         )],
    ...)
    >>> response = definition.get_data()

    Using get_stream

    >>> stream = definition.get_stream()
    >>> stream.open()
    """

    def __init__(
        self,
        instrument_tag: Optional[str] = None,
        start_date: "OptDateTime" = None,
        end_date: "OptDateTime" = None,
        tenor: Optional[str] = None,
        buy_sell: Union[BuySell, str] = None,
        day_count_basis: Union[DayCountBasis, str] = None,
        underlying_instruments: Optional[List[UnderlyingContract]] = None,
        is_coupon_exchanged: Optional[bool] = None,
        repo_rate_percent: Optional[float] = None,
        fields: "OptStrStrs" = None,
        pricing_parameters: Optional[PricingParameters] = None,
        extended_params: "ExtendedParams" = None,
    ) -> None:
        # Take defensive copies of the caller's lists so later mutation of the
        # caller's objects cannot change this definition.
        self.underlying_instruments = try_copy_to_list(underlying_instruments)
        fields = try_copy_to_list(fields)
        # Build the serialized instrument definition from the COPIED list
        # (self.underlying_instruments), consistent with how `fields` is
        # handled; previously the original caller-owned list was passed here,
        # so the stored copy and the definition could diverge.
        definition = RepoInstrumentDefinition(
            buy_sell=buy_sell,
            day_count_basis=day_count_basis,
            underlying_instruments=self.underlying_instruments,
            end_date=end_date,
            instrument_tag=instrument_tag,
            is_coupon_exchanged=is_coupon_exchanged,
            repo_rate_percent=repo_rate_percent,
            start_date=start_date,
            tenor=tenor,
        )
        super().__init__(
            definition=definition,
            fields=fields,
            pricing_parameters=pricing_parameters,
            extended_params=extended_params,
        )

    def __repr__(self):
        # Short diagnostic representation; shows only the underlying instruments.
        return create_repr(
            self,
            middle_path="content.ipa.financial_contract",
            content=f"{{underlying_instruments='{self.underlying_instruments}'}}",
        )
from typing import Optional, List, Union
from ....._types import OptDateTime
from . import BermudanSwaptionDefinition
from ..._enums import (
BuySell,
ExerciseStyle,
PremiumSettlementType,
SwaptionSettlementType,
SwaptionType,
)
from ..._models import InputFlow
from .. import swap
from .._instrument_definition import InstrumentDefinition
from ..swap._swap_definition import SwapInstrumentDefinition
class SwaptionInstrumentDefinition(InstrumentDefinition):
    """
    API endpoint for Financial Contract analytics,
    that returns calculations relevant to each contract type.

    Parameters
    ----------
    instrument_tag : str, optional
        A user defined string to identify the instrument. It can be used to link
        output results to the instrument definition. Limited to 40 characters.
        Only alphabetic, numeric and '- _.#=@' characters are supported.
        Optional. No default value applies.
    start_date : str or date or datetime or timedelta, optional
        The date the swaption starts. Optional. By default it is derived from the
        trade date and the day-to-spot convention of the contract currency.
    end_date : str or date or datetime or timedelta, optional
        The maturity or expiry date of the instrument's leg. The value is expressed
        in ISO 8601 format: YYYY-MM-DDT[hh]:[mm]:[ss]Z (e.g. 2021-01-01T00:00:00Z).
        Either tenor or end_date must be provided. The default value is
        valuation_date shifted forward by tenor.
    tenor : str, optional
        The code indicating the period between start_date and end_date of the
        instrument (e.g. '6M', '1Y'). Mandatory if end_date is not provided. The
        default value is calculated from end_date.
    notional_amount : float, optional
        The notional amount of the instrument. The default value is '1,000,000'.
    bermudan_swaption_definition : BermudanSwaptionDefinition, optional
    buy_sell : BuySell or str, optional
        The indicator of the deal side. The possible values are: Buy (buying the
        option), Sell (selling/writing the option). No default value applies.
    exercise_style : ExerciseStyle or str, optional
        The option style based on its exercise restrictions. The possible values
        are: AMER, EURO, BERM. Note: all exercise styles may not apply to certain
        option types. No default value applies.
    payments : InputFlow, optional
        An array of payments.
    premium_settlement_type : PremiumSettlementType or str, optional
        The cash settlement type of the option premium: spot, forward.
    settlement_type : SwaptionSettlementType or str, optional
        The settlement method for options when exercised. The possible values are:
        Physical (delivering the underlying asset, or for a swaption, physically
        entering into the underlying swap), Cash (paying out in cash). The default
        value is 'Physical'.
    swaption_type : SwaptionType or str, optional
        The indicator if the swaption is a payer or a receiver. The possible values
        are: Receiver (a right to receive a fixed rate of the underlying swap),
        Payer (a right to pay a fixed rate of the underlying swap). No default
        value applies.
    underlying_definition : SwapDefinition, optional
    spread_vs_atm_in_bp : float, optional
        Spread between strike and ATM strike, expressed in basis points (bp).
    strike_percent : float, optional
        The set price at which the owner of the option can buy or sell the
        underlying asset. For a swaption, it is the fixed rate of the underlying
        swap at which the owner of the swaption can enter the swap. The value is
        expressed in percentages. By default, fixed_rate_percent of the underlying
        swap is used.
    delivery_date : str or date or datetime or timedelta, optional
        Sent as the 'deliveryDate' parameter.
        NOTE(review): this parameter is not documented upstream; presumably the
        delivery/settlement date of the underlying swap on exercise -- confirm.
    """
    def __init__(
        self,
        instrument_tag: Optional[str] = None,
        start_date: "OptDateTime" = None,
        end_date: "OptDateTime" = None,
        tenor: Optional[str] = None,
        notional_amount: Optional[float] = None,
        bermudan_swaption_definition: Optional[BermudanSwaptionDefinition] = None,
        buy_sell: Union[BuySell, str] = None,
        exercise_style: Union[ExerciseStyle, str] = None,
        payments: Optional[List[InputFlow]] = None,
        premium_settlement_type: Union[PremiumSettlementType, str] = None,
        settlement_type: Union[SwaptionSettlementType, str] = None,
        swaption_type: Union[SwaptionType, str] = None,
        underlying_definition: Optional[swap.Definition] = None,
        spread_vs_atm_in_bp: Optional[float] = None,
        strike_percent: Optional[float] = None,
        delivery_date: "OptDateTime" = None,
    ) -> None:
        super().__init__()
        # Each assignment routes through the matching property setter below,
        # which records the value in the underlying parameter dictionary.
        self.instrument_tag = instrument_tag
        self.start_date = start_date
        self.end_date = end_date
        self.tenor = tenor
        self.notional_amount = notional_amount
        self.bermudan_swaption_definition = bermudan_swaption_definition
        self.buy_sell = buy_sell
        self.exercise_style = exercise_style
        self.payments = payments
        self.premium_settlement_type = premium_settlement_type
        self.settlement_type = settlement_type
        self.swaption_type = swaption_type
        self.underlying_definition = underlying_definition
        self.spread_vs_atm_in_bp = spread_vs_atm_in_bp
        self.strike_percent = strike_percent
        self.delivery_date = delivery_date
    def get_instrument_type(self):
        # Instrument-type discriminator used by the request serialization.
        return "Swaption"
    @property
    def bermudan_swaption_definition(self):
        """
        :return: object BermudanSwaptionDefinition
        """
        return self._get_object_parameter(BermudanSwaptionDefinition, "bermudanSwaptionDefinition")
    @bermudan_swaption_definition.setter
    def bermudan_swaption_definition(self, value):
        self._set_object_parameter(BermudanSwaptionDefinition, "bermudanSwaptionDefinition", value)
    @property
    def buy_sell(self):
        """
        The side of the deal.
        :return: enum BuySell
        """
        return self._get_enum_parameter(BuySell, "buySell")
    @buy_sell.setter
    def buy_sell(self, value):
        # Accepts a BuySell enum member or its string name.
        self._set_enum_parameter(BuySell, "buySell", value)
    @property
    def exercise_style(self):
        """
        The option style based on its exercise restrictions (AMER, EURO, BERM).
        :return: enum ExerciseStyle
        """
        return self._get_enum_parameter(ExerciseStyle, "exerciseStyle")
    @exercise_style.setter
    def exercise_style(self, value):
        self._set_enum_parameter(ExerciseStyle, "exerciseStyle", value)
    @property
    def payments(self):
        """
        An array of payments.
        :return: list InputFlow
        """
        return self._get_list_parameter(InputFlow, "payments")
    @payments.setter
    def payments(self, value):
        self._set_list_parameter(InputFlow, "payments", value)
    @property
    def premium_settlement_type(self):
        """
        The cash settlement type of the option premium: spot, forward.
        :return: enum PremiumSettlementType
        """
        return self._get_enum_parameter(PremiumSettlementType, "premiumSettlementType")
    @premium_settlement_type.setter
    def premium_settlement_type(self, value):
        self._set_enum_parameter(PremiumSettlementType, "premiumSettlementType", value)
    @property
    def settlement_type(self):
        """
        The settlement type of the option if the option is exercised.
        :return: enum SwaptionSettlementType
        """
        return self._get_enum_parameter(SwaptionSettlementType, "settlementType")
    @settlement_type.setter
    def settlement_type(self, value):
        self._set_enum_parameter(SwaptionSettlementType, "settlementType", value)
    @property
    def swaption_type(self):
        """
        The indicator if the swaption is a payer or a receiver. The possible
        values are: Receiver (a right to receive a fixed rate of the underlying
        swap), Payer (a right to pay a fixed rate of the underlying swap). No
        default value applies.
        :return: enum SwaptionType
        """
        return self._get_enum_parameter(SwaptionType, "swaptionType")
    @swaption_type.setter
    def swaption_type(self, value):
        self._set_enum_parameter(SwaptionType, "swaptionType", value)
    @property
    def underlying_definition(self):
        """
        Definition of the underlying swap contract.
        :return: object SwapDefinition
        """
        return self._get_object_parameter(SwapInstrumentDefinition, "underlyingDefinition")
    @underlying_definition.setter
    def underlying_definition(self, value):
        self._set_object_parameter(SwapInstrumentDefinition, "underlyingDefinition", value)
    @property
    def end_date(self):
        """
        The maturity or expiry date of the instrument's leg, in ISO 8601 format
        (e.g. 2021-01-01T00:00:00Z). Either tenor or end_date must be provided.
        The default value is valuation_date shifted forward by tenor.
        :return: str
        """
        return self._get_parameter("endDate")
    @end_date.setter
    def end_date(self, value):
        # Accepts str, date, datetime or timedelta (see "OptDateTime").
        self._set_datetime_parameter("endDate", value)
    @property
    def instrument_tag(self):
        """
        A user defined string to identify the instrument. It can be used to
        link output results to the instrument definition. Limited to 40
        characters. Only alphabetic, numeric and '- _.#=@' characters are
        supported. Optional. No default value applies.
        :return: str
        """
        return self._get_parameter("instrumentTag")
    @instrument_tag.setter
    def instrument_tag(self, value):
        self._set_parameter("instrumentTag", value)
    @property
    def notional_amount(self):
        """
        The notional amount of the instrument. The default value is '1,000,000'.
        :return: float
        """
        return self._get_parameter("notionalAmount")
    @notional_amount.setter
    def notional_amount(self, value):
        self._set_parameter("notionalAmount", value)
    @property
    def spread_vs_atm_in_bp(self):
        """
        Spread between strike and ATM strike, expressed in basis points (bp).
        :return: float
        """
        return self._get_parameter("spreadVsAtmInBp")
    @spread_vs_atm_in_bp.setter
    def spread_vs_atm_in_bp(self, value):
        self._set_parameter("spreadVsAtmInBp", value)
    @property
    def start_date(self):
        """
        The date the swaption starts. Optional. By default it is derived from
        the trade date and the day-to-spot convention of the contract currency.
        :return: str
        """
        return self._get_parameter("startDate")
    @start_date.setter
    def start_date(self, value):
        # Accepts str, date, datetime or timedelta (see "OptDateTime").
        self._set_datetime_parameter("startDate", value)
    @property
    def strike_percent(self):
        """
        The set price at which the owner of the option can buy or sell the
        underlying asset. For a swaption, it is the fixed rate of the
        underlying swap at which the owner of the swaption can enter the swap.
        The value is expressed in percentages. By default, fixed_rate_percent
        of the underlying swap is used.
        :return: float
        """
        return self._get_parameter("strikePercent")
    @strike_percent.setter
    def strike_percent(self, value):
        self._set_parameter("strikePercent", value)
    @property
    def tenor(self):
        """
        The code indicating the period between start_date and end_date of the
        instrument (e.g. '6M', '1Y'). Mandatory if end_date is not provided.
        The default value is calculated from end_date.
        :return: str
        """
        return self._get_parameter("tenor")
    @tenor.setter
    def tenor(self, value):
        self._set_parameter("tenor", value)
    @property
    def delivery_date(self):
        """
        Sent as the 'deliveryDate' parameter.
        NOTE(review): not documented upstream -- confirm semantics.
        :return: str
        """
        return self._get_parameter("deliveryDate")
    @delivery_date.setter
    def delivery_date(self, value):
        # Accepts str, date, datetime or timedelta (see "OptDateTime").
        self._set_datetime_parameter("deliveryDate", value)
from typing import Optional, Union
from ....._types import OptDateTime
from ..._object_definition import ObjectDefinition
from ..._enums import PriceSide
class PricingParameters(ObjectDefinition):
    """
    Pricing parameters for a swaption contract.

    Parameters
    ----------
    price_side : PriceSide or str, optional
        The quoted price side of the instrument. Optional. Default value is 'mid'.
    exercise_date : str or date or datetime or timedelta, optional
    market_data_date : str or date or datetime or timedelta, optional
        The date at which the market data is retrieved. The value is expressed
        in ISO 8601 format: YYYY-MM-DDT[hh]:[mm]:[ss]Z (e.g.
        '2021-01-01T00:00:00Z'). It should be less than or equal to
        valuation_date. Optional. By default, market_data_date is
        valuation_date or today.
    market_value_in_deal_ccy : float, optional
        The market value of the instrument. The value is expressed in the deal
        currency. Optional. No default value applies. Note that premium takes
        priority over volatility input.
    nb_iterations : int, optional
        The number of steps for the Bermudan swaption pricing via the
        Hull-White one-factor (HW1F) tree. No default value applies.
    report_ccy : str, optional
        The reporting currency code, expressed in ISO 4217 alphabetical format
        (e.g. 'USD'). It is set for the fields ending with 'XxxInReportCcy'.
        Optional. The default value is the notional currency.
    simulate_exercise : bool, optional
        Tells if future cashflows should be considered as exercised or not.
        Possible values: True, False.
    valuation_date : str or date or datetime or timedelta, optional
        The valuation date for pricing. If not set the valuation date is equal to
        market_data_date or today. For assets that contain a settlementConvention,
        the default valuation date is equal to the settlement date of the asset,
        which is usually TradeDate + SettlementConvention.

    Examples
    --------
    >>> import refinitiv.data.content.ipa.financial_contracts as rdf
    >>> rdf.swaption.PricingParameters(valuation_date="2020-04-24", nb_iterations=80)
    """
    def __init__(
        self,
        price_side: Union[PriceSide, str] = None,
        exercise_date: "OptDateTime" = None,
        market_data_date: "OptDateTime" = None,
        market_value_in_deal_ccy: Optional[float] = None,
        nb_iterations: Optional[int] = None,
        report_ccy: Optional[str] = None,
        simulate_exercise: Optional[bool] = None,
        valuation_date: "OptDateTime" = None,
    ) -> None:
        super().__init__()
        # Each assignment routes through the matching property setter below.
        self.price_side = price_side
        self.exercise_date = exercise_date
        self.market_data_date = market_data_date
        self.market_value_in_deal_ccy = market_value_in_deal_ccy
        self.nb_iterations = nb_iterations
        self.report_ccy = report_ccy
        self.simulate_exercise = simulate_exercise
        self.valuation_date = valuation_date
    @property
    def price_side(self):
        """
        The quoted price side of the instrument. Optional. Default value is 'mid'.
        :return: enum PriceSide
        """
        return self._get_enum_parameter(PriceSide, "priceSide")
    @price_side.setter
    def price_side(self, value):
        # Accepts a PriceSide enum member or its string name.
        self._set_enum_parameter(PriceSide, "priceSide", value)
    @property
    def exercise_date(self):
        """
        The exercise date used for pricing (sent as 'exerciseDate').
        :return: str
        """
        return self._get_parameter("exerciseDate")
    @exercise_date.setter
    def exercise_date(self, value):
        # Accepts str, date, datetime or timedelta (see "OptDateTime").
        self._set_datetime_parameter("exerciseDate", value)
    @property
    def market_data_date(self):
        """
        The market data date for pricing.
        By default, the market data date is the valuation_date or today.
        :return: str
        """
        return self._get_parameter("marketDataDate")
    @market_data_date.setter
    def market_data_date(self, value):
        # Accepts str, date, datetime or timedelta (see "OptDateTime").
        self._set_datetime_parameter("marketDataDate", value)
    @property
    def market_value_in_deal_ccy(self):
        """
        Market value (in deal currency) override, used as pricing analysis
        input to compute VolatilityPercent. No override is applied by default.
        Note that premium takes priority over volatility input.
        :return: float
        """
        return self._get_parameter("marketValueInDealCcy")
    @market_value_in_deal_ccy.setter
    def market_value_in_deal_ccy(self, value):
        self._set_parameter("marketValueInDealCcy", value)
    @property
    def nb_iterations(self):
        """
        Number of steps used for Bermudan swaptions and the HW1F tree.
        :return: int
        """
        return self._get_parameter("nbIterations")
    @nb_iterations.setter
    def nb_iterations(self, value):
        self._set_parameter("nbIterations", value)
    @property
    def report_ccy(self):
        """
        The reporting currency code, in ISO 4217 alphabetical format (e.g.
        'USD'). It is set for the fields ending with 'XxxInReportCcy'.
        Optional. The default value is the notional currency.
        :return: str
        """
        return self._get_parameter("reportCcy")
    @report_ccy.setter
    def report_ccy(self, value):
        self._set_parameter("reportCcy", value)
    @property
    def simulate_exercise(self):
        """
        Tells if future cashflows should be considered as exercised or not.
        Possible values: True, False.
        :return: bool
        """
        return self._get_parameter("simulateExercise")
    @simulate_exercise.setter
    def simulate_exercise(self, value):
        self._set_parameter("simulateExercise", value)
    @property
    def valuation_date(self):
        """
        The valuation date for pricing.
        If not set the valuation date is equal to market_data_date or today.
        For assets that contain a settlementConvention, the default valuation
        date is equal to the settlement date of the asset, which is usually
        TradeDate + SettlementConvention.
        :return: str
        """
        return self._get_parameter("valuationDate")
    @valuation_date.setter
    def valuation_date(self, value):
        # Accepts str, date, datetime or timedelta (see "OptDateTime").
        self._set_datetime_parameter("valuationDate", value)
from typing import Optional, TYPE_CHECKING, Union
from ._bermudan_swaption_definition import BermudanSwaptionDefinition
from ._swaption_definition import SwaptionInstrumentDefinition
from ._swaption_pricing_parameters import PricingParameters
from .. import swap
from .._base_definition import BaseDefinition
from ..._enums import (
BuySell,
ExerciseStyle,
SwaptionSettlementType,
PremiumSettlementType,
SwaptionType,
)
from ..._models import InputFlow
from ....._tools import create_repr, try_copy_to_list
if TYPE_CHECKING:
from ....._types import ExtendedParams, OptStrStrs, OptDateTime
class Definition(BaseDefinition):
    """
    API endpoint for Financial Contract analytics,
    that returns calculations relevant to each contract type.

    Parameters
    ----------
    instrument_tag : str, optional
        A user defined string to identify the instrument. It can be used to link output
        results to the instrument definition. Limited to 40 characters. Only alphabetic,
        numeric and '- _.#=@' characters are supported. Optional. No default value
        applies.
    start_date : str or date or datetime or timedelta, optional
        The date the swaption starts. Optional. By default it is derived from the
        tradeDate and the day to spot convention of the contract currency.
    end_date : str or date or datetime or timedelta, optional
        The maturity or expiry date of the instrument's leg. The value is expressed in
        ISO 8601 format: YYYY-MM-DDT[hh]:[mm]:[ss]Z (e.g. 2021-01-01T00:00:00Z). Either
        tenor or endDate must be provided. The default value is valuationDate shifted
        forward by tenor.
    tenor : str, optional
        The code indicating the period between startDate and endDate of the instrument
        (e.g. '6M', '1Y'). Mandatory, if endDate is not provided. The default value is
        calculated from endDate.
    notional_amount : float, optional
        The notional amount of the instrument. The default value is '1,000,000'.
    bermudan_swaption_definition : BermudanSwaptionDefinition, optional
    buy_sell : BuySell, optional
        The indicator of the deal side. The possible values are: Buy: buying the
        option, Sell: selling/writing the option. No default value applies.
    exercise_style : ExerciseStyle, optional
        The option style based on its exercise restrictions. The possible values are:
        AMER, EURO, BERM. Note: all exercise styles may not apply to certain option.
        No default value applies.
    payments : InputFlow, optional
        An array of payments.
    premium_settlement_type : PremiumSettlementType or str, optional
        The cash settlement type of the option premium: Spot, Forward.
    settlement_type : SwaptionSettlementType or str, optional
        The settlement method for options when exercised. The possible values are:
        Physical: delivering the underlying asset, or for a swaption, physically
        entering into the underlying swap. Cash: paying out in cash. The default
        value is 'Physical'.
    swaption_type : SwaptionType or str, optional
        The indicator if the swaption is a payer or a receiver. The possible values are:
        Receiver: a right to receive a fixed rate of the underlying swap, Payer: a
        right to pay a fixed rate of the underlying swap. No default value applies.
    underlying_definition : swap.Definition, optional
    spread_vs_atm_in_bp : float, optional
        Spread between strike and ATM strike, expressed in basis points (bp).
    strike_percent : float, optional
        The set price at which the owner of the option can buy or sell the underlying
        asset. For a swaption, it is the fixed rate of the underlying swap at which the
        owner of the swaption can enter the swap. The value is expressed in percentages.
        By default, fixedRatePercent of the underlying swap is used.
    fields: list of str, optional
        Contains the list of Analytics that the quantitative analytic service will
        compute.
    pricing_parameters : PricingParameters, optional
        The pricing parameters to apply to this instrument. If pricing parameters
        are not provided at this level parameters defined globally at the request
        level are used. If no pricing parameters are provided globally default
        values apply.
    extended_params : dict, optional
        If necessary other parameters.
    delivery_date : str or date or datetime or timedelta, optional
        The date when the underlying asset is delivered.
        The value is expressed in ISO 8601 format: YYYY-MM-DDT[hh]:[mm]:[ss]Z (e.g. '2021-01-01T00:00:00Z').

    Methods
    -------
    get_data(session=session, on_response=on_response, async_mode=None)
        Returns a response from the data platform
    get_data_async(session=session, on_response=on_response, async_mode=None)
        Returns a response asynchronously from the data platform
    get_stream(session=session)
        Get stream quantitative analytic service subscription

    Examples
    --------
    >>> import refinitiv.data.content.ipa.financial_contracts as rdf
    >>> definition = rdf.swaption.Definition(
    ...     buy_sell=rdf.swaption.BuySell.BUY,
    ...     swaption_type=rdf.swaption.SwaptionType.PAYER,
    ...     exercise_style=rdf.swaption.ExerciseStyle.BERM,
    ...     underlying_definition=rdf.swap.Definition(tenor="3Y", template="EUR_AB6E"),
    ...)
    >>> response = definition.get_data()

    Using get_stream

    >>> stream = definition.get_stream()
    >>> stream.open()
    """

    def __init__(
        self,
        instrument_tag: Optional[str] = None,
        start_date: "OptDateTime" = None,
        end_date: "OptDateTime" = None,
        tenor: Optional[str] = None,
        notional_amount: Optional[float] = None,
        bermudan_swaption_definition: Optional[BermudanSwaptionDefinition] = None,
        buy_sell: Optional[BuySell] = None,
        exercise_style: Optional[ExerciseStyle] = None,
        payments: Optional[InputFlow] = None,
        premium_settlement_type: Union[PremiumSettlementType, str] = None,
        settlement_type: Union[SwaptionSettlementType, str] = None,
        swaption_type: Union[SwaptionType, str] = None,
        underlying_definition: Optional[swap.Definition] = None,
        spread_vs_atm_in_bp: Optional[float] = None,
        strike_percent: Optional[float] = None,
        fields: "OptStrStrs" = None,
        pricing_parameters: Optional[PricingParameters] = None,
        extended_params: "ExtendedParams" = None,
        delivery_date: "OptDateTime" = None,
    ):
        # Defensive copy so the caller's fields list is not shared/mutated.
        fields = try_copy_to_list(fields)
        # Kept on the instance for __repr__.
        self.underlying_definition = underlying_definition
        definition = SwaptionInstrumentDefinition(
            bermudan_swaption_definition=bermudan_swaption_definition,
            buy_sell=buy_sell,
            exercise_style=exercise_style,
            payments=payments,
            premium_settlement_type=premium_settlement_type,
            settlement_type=settlement_type,
            swaption_type=swaption_type,
            underlying_definition=underlying_definition,
            end_date=end_date,
            instrument_tag=instrument_tag,
            notional_amount=notional_amount,
            spread_vs_atm_in_bp=spread_vs_atm_in_bp,
            start_date=start_date,
            strike_percent=strike_percent,
            tenor=tenor,
            delivery_date=delivery_date,
        )
        super().__init__(
            definition=definition,
            fields=fields,
            pricing_parameters=pricing_parameters,
            extended_params=extended_params,
        )

    def __repr__(self):
        return create_repr(
            self,
            middle_path="content.ipa.financial_contracts.swaption",
            content=f"{{underlying_definition='{self.underlying_definition}'}}",
        )
from typing import List, Optional

from ....._types import OptDateTime
from ._swap_leg_definition import LegDefinition
from .._instrument_definition import InstrumentDefinition
class SwapInstrumentDefinition(InstrumentDefinition):
    """
    Definition of an interest rate swap contract.

    A swap can be described in one of three ways: by ``instrument_code`` (a
    swap RIC), by ``template`` (a reference to a common swap contract), or by
    an explicit list of ``legs``. Either ``end_date`` or ``tenor`` must be
    provided.
    """

    def __init__(
        self,
        instrument_tag: Optional[str] = None,
        instrument_code: Optional[str] = None,
        trade_date: "OptDateTime" = None,
        start_date: "OptDateTime" = None,
        end_date: "OptDateTime" = None,
        tenor: Optional[str] = None,
        legs: Optional[List[LegDefinition]] = None,
        is_non_deliverable: Optional[bool] = None,
        settlement_ccy: Optional[str] = None,
        start_tenor: Optional[str] = None,
        template: Optional[str] = None,
    ) -> None:
        super().__init__()
        self.instrument_tag = instrument_tag
        self.instrument_code = instrument_code
        self.trade_date = trade_date
        self.start_date = start_date
        self.end_date = end_date
        self.tenor = tenor
        self.legs = legs
        self.is_non_deliverable = is_non_deliverable
        self.settlement_ccy = settlement_ccy
        self.start_tenor = start_tenor
        self.template = template

    def get_instrument_type(self):
        """Return the instrument type identifier for this definition."""
        return "Swap"

    @property
    def legs(self):
        """
        The legs of the Swap to provide a full definition of the swap if no template or instrumentCode have been provided.
        Optional. Either InstrumentCode, Template, or Legs must be provided.

        :return: list SwapLegDefinition
        """
        return self._get_list_parameter(LegDefinition, "legs")

    @legs.setter
    def legs(self, value: Optional[List[LegDefinition]]):
        self._set_list_parameter(LegDefinition, "legs", value)

    @property
    def end_date(self):
        """
        The maturity date of the swap contract.
        Mandatory. Either the endDate or the tenor must be provided.

        :return: str
        """
        return self._get_parameter("endDate")

    @end_date.setter
    def end_date(self, value):
        self._set_datetime_parameter("endDate", value)

    @property
    def instrument_code(self):
        """
        A swap RIC that is used to retrieve the description of the swap contract.
        Optional. Either instrumentCode, template, or legs must be provided.

        :return: str
        """
        return self._get_parameter("instrumentCode")

    @instrument_code.setter
    def instrument_code(self, value):
        self._set_parameter("instrumentCode", value)

    @property
    def is_non_deliverable(self):
        """
        A flag that indicates if the swap is non-deliverable.
        Optional. By default 'false'.

        :return: bool
        """
        return self._get_parameter("isNonDeliverable")

    @is_non_deliverable.setter
    def is_non_deliverable(self, value):
        self._set_parameter("isNonDeliverable", value)

    @property
    def settlement_ccy(self):
        """
        For non-deliverable instrument, the ISO code of the settlement currency.
        Optional. By priority order : 'USD' if one leg denominated in USD; 'EUR' if one leg is denominated in EUR; the paidLegCcy.

        :return: str
        """
        return self._get_parameter("settlementCcy")

    @settlement_ccy.setter
    def settlement_ccy(self, value):
        self._set_parameter("settlementCcy", value)

    @property
    def start_date(self):
        """
        The date the swap starts accruing interest. Its effective date.
        Optional. By default, it is derived from the TradeDate and the day to spot convention of the contract currency.

        :return: str
        """
        return self._get_parameter("startDate")

    @start_date.setter
    def start_date(self, value):
        self._set_datetime_parameter("startDate", value)

    @property
    def start_tenor(self):
        """
        The code indicating the period from a spot date to startDate of the instrument
        (e.g. '1M'). No default value applies.

        :return: str
        """
        return self._get_parameter("startTenor")

    @start_tenor.setter
    def start_tenor(self, value):
        self._set_parameter("startTenor", value)

    @property
    def template(self):
        """
        A reference to a common swap contract.
        Optional. Either InstrumentCode, Template, or Legs must be provided.

        :return: str
        """
        return self._get_parameter("template")

    @template.setter
    def template(self, value):
        self._set_parameter("template", value)

    @property
    def tenor(self):
        """
        The period code that represents the time between the start date and end date the contract.
        Mandatory. Either the endDate or the tenor must be provided.

        :return: str
        """
        return self._get_parameter("tenor")

    @tenor.setter
    def tenor(self, value):
        self._set_parameter("tenor", value)

    @property
    def trade_date(self):
        """
        The date the swap contract was created.
        Optional. By default, the valuation date.

        :return: str
        """
        return self._get_parameter("tradeDate")

    @trade_date.setter
    def trade_date(self, value):
        self._set_datetime_parameter("tradeDate", value)
from typing import Optional, List, Union
from ..._enums import (
AdjustInterestToPaymentDate,
BusinessDayConvention,
DateRollingConvention,
DayCountBasis,
Direction,
Frequency,
IndexAverageMethod,
IndexCompoundingMethod,
IndexObservationMethod,
IndexResetType,
IndexSpreadCompoundingMethod,
InterestCalculationConvention,
InterestType,
NotionalExchange,
StubRule,
PriceSide,
)
from ..._models import AmortizationItem
from ..._object_definition import ObjectDefinition
from ....._tools import try_copy_to_list
from ....._types import OptDateTime
class LegDefinition(ObjectDefinition):
"""
API endpoint for Financial Contract analytics,
that returns calculations relevant to each contract type.
Parameters
----------
instrument_tag : str, optional
leg_tag : str, optional
A user-defined string to identify the direction of the leg: 'paid' or
'received'. optional. no default value applies.
direction : Direction or str, optional
The indication whether the cash flows of the instrument's leg are paid or
received. the possible values are: paid: the cash flows are paid to the
counterparty, received: the cash flows are received from the counterparty. no
default value applies.
interest_type : InterestType or str, optional
An indicator whether the instrument pays a fixed or floating interest. the
possible values are: fixed, float. no default value applies.
notional_ccy : str, optional
The currency of the instrument's notional amount. the value is expressed in iso
4217 alphabetical format (e.g. 'usd'). no default value applies.
notional_amount : float, optional
The notional amount of the instrument's leg. the default value is '1,000,000'.
fixed_rate_percent : float, optional
The interest rate of the instrument. the value is expressed in percentages.
mandatory if no instrumentcode is defined. if instrumentcode is defined, the
value comes from the instrument reference data.
index_name : str, optional
The name of the floating rate index (e.g. 'euribor'). no default value applies.
index_tenor : str, optional
The period code indicating the maturity of the floating rate index. the default
value is the tenor equivalent toindexresetfrequency or interestpaymentfrequency.
spread_bp : float, optional
The interest spread in basis points that is added to the floating rate index
value. optional. if instrumentcode is defined, the value comes from the
instrument reference data. in case of a user-defined instrument, the default
value is '0'.
interest_payment_frequency : Frequency or str, optional
The frequency of the interest payment. either indexresetfrequency or
interestpaymentfrequency must be provided (e.g. annual, semiannual). the
default value is indexresetfrequency.
interest_calculation_method : DayCountBasis or str, optional
        The day count basis method used to calculate the interest payments (e.g.
        dcb_30_360, dcb_30_actual). The default value is selected based on notionalCcy.
accrued_calculation_method : DayCountBasis or str, optional
The day count basis method used to calculate the accrued interest payments (e.g.
dcb_30_360, dcb_30_actual). if instrumentcode is defined, the value comes from
the instrument reference data. in case of a user-defined instrument,
interestcalculationmethod is used.
payment_business_day_convention : BusinessDayConvention or str, optional
The method to adjust dates to working days. the possible values are:
previousbusinessday, nextbusinessday, modified following, nomoving,
bbswmodifiedfollowing. if instrumentcode is defined, the value comes from the
instrument reference data. in case of a user-defined instrument, the default
value is'modifiedfollowing'.
payment_roll_convention : DateRollingConvention or str, optional
The method to adjust payment dates when they fall at the end of the month (e.g.
28th of february, 30th, 31st). the possible values are: last, same,
last28, same28. if instrumentcode is defined, the value comes from the
instrument reference data. in case of a user-defined instrument, the default
value is'same'.
index_reset_frequency : Frequency or str, optional
The reset frequency for the floating instrument (e.g. annual, semiannual). the
default value is interestpaymentfrequency.
index_reset_type : IndexResetType or str, optional
A type indicating if the floating rate index is reset before the coupon period
starts or at the end of the coupon period. the possible values are: inadvance:
resets the index before the start of the interest period, inarrears: resets the
index at the end of the interest period. the default value is 'inadvance'.
index_fixing_lag : int, optional
        The number of working days between the fixing date and the start of the coupon
        period ('inadvance') or the end of the coupon period ('inarrears'). the
inadvance/inarrears mode is set in the indexresettype parameter. the default
value is the fixing lag associated to the index defined/determined by default on
the floating instrument.
first_regular_payment_date : str or date or datetime or timedelta, optional
The first regular interest payment date used for the odd first interest period.
the value is expressed in iso 8601 format: yyyy-mm-ddt[hh]:[mm]:[ss]z (e.g.
2021-01-01t00:00:00z). no default value applies.
last_regular_payment_date : str or date or datetime or timedelta, optional
The last regular interest payment date used for the odd last interest period.
the value is expressed in iso 8601 format: yyyy-mm-ddt[hh]:[mm]:[ss]z (e.g.
2021-01-01t00:00:00z). no default value applies.
amortization_schedule : AmortizationItem, optional
The amortization schedule of the instrument. it contains the following
information: startdate, enddate, remainingnotional,
amortizationfrequency, amount, amortizationtype. optional. no default value
applies.
payment_business_days : str, optional
A list of comma-separated calendar codes to adjust dates (e.g. 'emu' or 'usa').
the default value is the calendar associated to the market conventions of the
interestpaymentccy for the corresponding leg.
notional_exchange : NotionalExchange or str, optional
An indicator if the notional amount is exchanged and when it is exchanged. the
possible values are: none, start, end, both, endadjustment. the
default value is 'none'.
adjust_interest_to_payment_date : AdjustInterestToPaymentDate or str, optional
An indication if the coupon dates are adjusted to the payment dates. the
possible values are: adjusted, unadjusted. if instrumentcode is defined,
the value comes from the instrument reference data. in case of a user-defined
instrument, the default value is 'adjusted'.
index_compounding_method : IndexCompoundingMethod or str, optional
The method how the interest rate is calculated from the reset floating rates
when the reset frequency is higher than the interest payment frequency (e.g.
daily index reset with quarterly interest payments). the possible values are:
compounded, average, constant, adjustedcompounded, mexicancompounded. if
instrumentcode is defined, the value comes from the instrument reference data.
in case of a user-defined instrument, the default value is 'constant'.
interest_payment_delay : int, optional
The number of working days between the end of the interest accrual period and
the interest payment date. by default, no delay (0) is applied.
stub_rule : StubRule or str, optional
The rule that defines whether coupon roll dates are aligned to the maturity or
issue date. the possible values are: issue, maturity, shortfirstprorata,
shortfirstfull, longfirstfull, shortlastprorata. the default value is
'maturity'.
index_average_method : IndexAverageMethod or str, optional
The value of the average index calculation method. the possible values are:
compoundedactual, dailycompoundedaverage, compoundedaveragerate,
arithmeticaverage
index_observation_method : IndexObservationMethod or str, optional
(rfr) method for determining the accrual observation period. the possible values
are: lookback: use the interest period for both rate accrual and interest
payment. periodshift: use the observation period for both rate accrual and
interest payment. mixed: use the observation period for rate accrual and
the interest period for interest payment.
index_spread_compounding_method : IndexSpreadCompoundingMethod or str, optional
The method defining how the computed float leg spread is applied to compounded
rate. it applies only when indexcompoundingmethod= compounded. the possible
values are: isdacompounding, nocompounding, isdaflatcompounding. the
default value is 'isdacompounding'.
interest_calculation_convention : InterestCalculationConvention or str, optional
The day count basis method convention used to calculate the interest payments.
optional. defaults to moneymarket. if instrumentcode is defined, the value comes
from the instrument reference data.
cms_template : str, optional
A reference to a common swap contract that represents the underlying swap in
case of a constant maturity swap contract (cms). example: eur_ab6e. no default
value applies.
floor_strike_percent : float, optional
The contractual strike rate of the floor. the value is expressed in percentages.
if this parameter is set, the floor will apply to the leg with the same
parameters set in the swaplegdefinition (e.g.maturity, frequency, index,
discounting rule). no default value applies.
index_fixing_ric : str, optional
The ric that carries the fixing value if the instrument has a floating interest.
optional. mandatory for floating rate instruments if no instrumentcode is
defined. if instrumentcode is defined, the value comes from the instrument
reference data. no default value applies.
upfront_amount : float, optional
The amount which represents the net present value of the swap. it is computed as
[(100 dirtypricepercent / 100) x notionalamount]. the value is expressed in
upfrontamountccy. by default, no payment (0) applies.
index_price_side : PriceSide or str, optional
The side that is selected for an index supporting Bid/Ask/Mid (which is the case of deposits).
fixed_rate_percent_schedule : dict, optional
The step structure: a list of pre-determined future coupon rates indexed by their dates.
Either fixedRatePercent or fixedRatePercentSchedule is used.
No default value applies.
"""
    def __init__(
        self,
        instrument_tag: Optional[str] = None,
        leg_tag: Optional[str] = None,
        direction: Union[Direction, str] = None,
        interest_type: Union[InterestType, str] = None,
        notional_ccy: Optional[str] = None,
        notional_amount: Optional[float] = None,
        fixed_rate_percent: Optional[float] = None,
        index_name: Optional[str] = None,
        index_tenor: Optional[str] = None,
        spread_bp: Optional[float] = None,
        interest_payment_frequency: Union[Frequency, str] = None,
        interest_calculation_method: Union[DayCountBasis, str] = None,
        accrued_calculation_method: Union[DayCountBasis, str] = None,
        payment_business_day_convention: Union[BusinessDayConvention, str] = None,
        payment_roll_convention: Union[DateRollingConvention, str] = None,
        index_reset_frequency: Union[Frequency, str] = None,
        index_reset_type: Union[IndexResetType, str] = None,
        index_fixing_lag: Optional[int] = None,
        first_regular_payment_date: "OptDateTime" = None,
        last_regular_payment_date: "OptDateTime" = None,
        amortization_schedule: Optional[List[AmortizationItem]] = None,
        payment_business_days: Optional[str] = None,
        notional_exchange: Union[NotionalExchange, str] = None,
        adjust_interest_to_payment_date: Union[AdjustInterestToPaymentDate, str] = None,
        index_compounding_method: Union[IndexCompoundingMethod, str] = None,
        interest_payment_delay: Optional[int] = None,
        stub_rule: Union[StubRule, str] = None,
        index_average_method: Union[IndexAverageMethod, str] = None,
        index_observation_method: Union[IndexObservationMethod, str] = None,
        index_spread_compounding_method: Union[IndexSpreadCompoundingMethod, str] = None,
        interest_calculation_convention: Union[InterestCalculationConvention, str] = None,
        cms_template: Optional[str] = None,
        floor_strike_percent: Optional[float] = None,
        index_fixing_ric: Optional[str] = None,
        upfront_amount: Optional[float] = None,
        index_price_side: Union[PriceSide, str] = None,
        fixed_rate_percent_schedule: Optional[dict] = None,
    ) -> None:
        """Create a swap leg definition.

        See the class docstring for the meaning of each parameter. Every
        assignment below goes through the corresponding property setter, which
        stores the value under its camelCase backend key.
        """
        super().__init__()
        self.instrument_tag = instrument_tag
        self.leg_tag = leg_tag
        self.direction = direction
        self.interest_type = interest_type
        self.notional_ccy = notional_ccy
        self.notional_amount = notional_amount
        self.fixed_rate_percent = fixed_rate_percent
        self.index_name = index_name
        self.index_tenor = index_tenor
        self.spread_bp = spread_bp
        self.interest_payment_frequency = interest_payment_frequency
        self.interest_calculation_method = interest_calculation_method
        self.accrued_calculation_method = accrued_calculation_method
        self.payment_business_day_convention = payment_business_day_convention
        self.payment_roll_convention = payment_roll_convention
        self.index_reset_frequency = index_reset_frequency
        self.index_reset_type = index_reset_type
        self.index_fixing_lag = index_fixing_lag
        self.first_regular_payment_date = first_regular_payment_date
        self.last_regular_payment_date = last_regular_payment_date
        # Defensive copy so the caller's list is not shared/mutated.
        self.amortization_schedule = try_copy_to_list(amortization_schedule)
        self.payment_business_days = payment_business_days
        self.notional_exchange = notional_exchange
        self.adjust_interest_to_payment_date = adjust_interest_to_payment_date
        self.index_compounding_method = index_compounding_method
        self.interest_payment_delay = interest_payment_delay
        self.stub_rule = stub_rule
        self.index_average_method = index_average_method
        self.index_observation_method = index_observation_method
        self.index_spread_compounding_method = index_spread_compounding_method
        self.interest_calculation_convention = interest_calculation_convention
        self.cms_template = cms_template
        self.floor_strike_percent = floor_strike_percent
        self.index_fixing_ric = index_fixing_ric
        self.upfront_amount = upfront_amount
        self.index_price_side = index_price_side
        self.fixed_rate_percent_schedule = fixed_rate_percent_schedule
    @property
    def accrued_calculation_method(self):
        """
        The day count basis method used to calculate the accrued interest
        payments (e.g. dcb_30_360, dcb_30_actual). If instrumentCode is
        defined, the value comes from the instrument reference data; for a
        user-defined instrument, interestCalculationMethod is used.

        :return: enum DayCountBasis
        """
        return self._get_enum_parameter(DayCountBasis, "accruedCalculationMethod")

    @accrued_calculation_method.setter
    def accrued_calculation_method(self, value: Union[DayCountBasis, str]):
        self._set_enum_parameter(DayCountBasis, "accruedCalculationMethod", value)
    @property
    def adjust_interest_to_payment_date(self):
        """
        An indication if the coupon dates are adjusted to the payment dates.
        The possible values are: adjusted, unadjusted. If instrumentCode is
        defined, the value comes from the instrument reference data; for a
        user-defined instrument, the default value is 'adjusted'.

        :return: enum AdjustInterestToPaymentDate
        """
        return self._get_enum_parameter(AdjustInterestToPaymentDate, "adjustInterestToPaymentDate")

    @adjust_interest_to_payment_date.setter
    def adjust_interest_to_payment_date(self, value: Union[AdjustInterestToPaymentDate, str]):
        self._set_enum_parameter(AdjustInterestToPaymentDate, "adjustInterestToPaymentDate", value)
    @property
    def amortization_schedule(self):
        """
        The amortization schedule of the instrument. Each item carries:
        startDate, endDate, remainingNotional, amortizationFrequency, amount
        and amortizationType. Optional; no default value applies.

        :return: list AmortizationItem
        """
        return self._get_list_parameter(AmortizationItem, "amortizationSchedule")

    @amortization_schedule.setter
    def amortization_schedule(self, value: Optional[List[AmortizationItem]]):
        self._set_list_parameter(AmortizationItem, "amortizationSchedule", value)
    @property
    def direction(self):
        """
        The indication whether the cash flows of the instrument's leg are paid
        or received. The possible values are: paid (the cash flows are paid to
        the counterparty), received (the cash flows are received from the
        counterparty). No default value applies.

        :return: enum Direction
        """
        return self._get_enum_parameter(Direction, "direction")

    @direction.setter
    def direction(self, value: Union[Direction, str]):
        self._set_enum_parameter(Direction, "direction", value)
    @property
    def index_average_method(self):
        """
        The average index calculation method. The possible values are:
        compoundedactual, dailycompoundedaverage, compoundedaveragerate,
        arithmeticaverage.

        :return: enum IndexAverageMethod
        """
        return self._get_enum_parameter(IndexAverageMethod, "indexAverageMethod")

    @index_average_method.setter
    def index_average_method(self, value: Union[IndexAverageMethod, str]):
        self._set_enum_parameter(IndexAverageMethod, "indexAverageMethod", value)
    @property
    def index_compounding_method(self):
        """
        The method how the interest rate is calculated from the reset floating
        rates when the reset frequency is higher than the interest payment
        frequency (e.g. daily index reset with quarterly interest payments).
        The possible values are: compounded, average, constant,
        adjustedcompounded, mexicancompounded. If instrumentCode is defined,
        the value comes from the instrument reference data; for a user-defined
        instrument, the default value is 'constant'.

        :return: enum IndexCompoundingMethod
        """
        return self._get_enum_parameter(IndexCompoundingMethod, "indexCompoundingMethod")

    @index_compounding_method.setter
    def index_compounding_method(self, value: Union[IndexCompoundingMethod, str]):
        self._set_enum_parameter(IndexCompoundingMethod, "indexCompoundingMethod", value)
    @property
    def index_observation_method(self):
        """
        (RFR) Method for determining the accrual observation period. The
        possible values are: lookback (use the interest period for both rate
        accrual and interest payment), periodshift (use the observation period
        for both rate accrual and interest payment), mixed (use the
        observation period for rate accrual and the interest period for
        interest payment).

        :return: enum IndexObservationMethod
        """
        return self._get_enum_parameter(IndexObservationMethod, "indexObservationMethod")

    @index_observation_method.setter
    def index_observation_method(self, value: Union[IndexObservationMethod, str]):
        self._set_enum_parameter(IndexObservationMethod, "indexObservationMethod", value)
    @property
    def index_reset_frequency(self):
        """
        The reset frequency for the floating instrument (e.g. annual,
        semiannual). The default value is interestPaymentFrequency.

        :return: enum Frequency
        """
        return self._get_enum_parameter(Frequency, "indexResetFrequency")

    @index_reset_frequency.setter
    def index_reset_frequency(self, value: Union[Frequency, str]):
        self._set_enum_parameter(Frequency, "indexResetFrequency", value)
    @property
    def index_reset_type(self):
        """
        A type indicating if the floating rate index is reset before the
        coupon period starts or at the end of the coupon period. The possible
        values are: inadvance (resets the index before the start of the
        interest period), inarrears (resets the index at the end of the
        interest period). The default value is 'inadvance'.

        :return: enum IndexResetType
        """
        return self._get_enum_parameter(IndexResetType, "indexResetType")

    @index_reset_type.setter
    def index_reset_type(self, value: Union[IndexResetType, str]):
        self._set_enum_parameter(IndexResetType, "indexResetType", value)
    @property
    def index_spread_compounding_method(self):
        """
        The method defining how the computed float leg spread is applied to
        the compounded rate. It applies only when indexCompoundingMethod =
        compounded. The possible values are: isdacompounding, nocompounding,
        isdaflatcompounding. The default value is 'isdacompounding'.

        :return: enum IndexSpreadCompoundingMethod
        """
        return self._get_enum_parameter(IndexSpreadCompoundingMethod, "indexSpreadCompoundingMethod")

    @index_spread_compounding_method.setter
    def index_spread_compounding_method(self, value: Union[IndexSpreadCompoundingMethod, str]):
        self._set_enum_parameter(IndexSpreadCompoundingMethod, "indexSpreadCompoundingMethod", value)
    @property
    def interest_calculation_convention(self):
        """
        The day count basis convention used to calculate the interest
        payments. Optional; defaults to moneymarket. If instrumentCode is
        defined, the value comes from the instrument reference data.

        :return: enum InterestCalculationConvention
        """
        return self._get_enum_parameter(InterestCalculationConvention, "interestCalculationConvention")

    @interest_calculation_convention.setter
    def interest_calculation_convention(self, value: Union[InterestCalculationConvention, str]):
        self._set_enum_parameter(InterestCalculationConvention, "interestCalculationConvention", value)
    @property
    def interest_calculation_method(self):
        """
        The day count basis method used to calculate the interest payments
        (e.g. dcb_30_360, dcb_30_actual). The default value is selected based
        on notionalCcy.

        :return: enum DayCountBasis
        """
        return self._get_enum_parameter(DayCountBasis, "interestCalculationMethod")

    @interest_calculation_method.setter
    def interest_calculation_method(self, value: Union[DayCountBasis, str]):
        self._set_enum_parameter(DayCountBasis, "interestCalculationMethod", value)
    @property
    def interest_payment_frequency(self):
        """
        The frequency of the interest payment. Either indexResetFrequency or
        interestPaymentFrequency must be provided (e.g. annual, semiannual).
        The default value is indexResetFrequency.

        :return: enum Frequency
        """
        return self._get_enum_parameter(Frequency, "interestPaymentFrequency")

    @interest_payment_frequency.setter
    def interest_payment_frequency(self, value: Union[Frequency, str]):
        self._set_enum_parameter(Frequency, "interestPaymentFrequency", value)
@property
def interest_type(self):
"""
An indicator whether the instrument pays a fixed or floating interest. the
possible values are: fixed, float. no default value applies.
:return: enum InterestType
"""
return self._get_enum_parameter(InterestType, "interestType")
@interest_type.setter
def interest_type(self, value):
self._set_enum_parameter(InterestType, "interestType", value)
@property
def notional_exchange(self):
"""
An indicator if the notional amount is exchanged and when it is exchanged. the
possible values are: none, start, end, both, endadjustment. the
default value is 'none'.
:return: enum NotionalExchange
"""
return self._get_enum_parameter(NotionalExchange, "notionalExchange")
@notional_exchange.setter
def notional_exchange(self, value):
self._set_enum_parameter(NotionalExchange, "notionalExchange", value)
@property
def payment_business_day_convention(self):
"""
The method to adjust dates to working days. the possible values are:
previousbusinessday, nextbusinessday, modified following, nomoving,
bbswmodifiedfollowing. if instrumentcode is defined, the value comes from the
instrument reference data. in case of a user-defined instrument, the default
value is'modifiedfollowing'.
:return: enum BusinessDayConvention
"""
return self._get_enum_parameter(BusinessDayConvention, "paymentBusinessDayConvention")
@payment_business_day_convention.setter
def payment_business_day_convention(self, value):
self._set_enum_parameter(BusinessDayConvention, "paymentBusinessDayConvention", value)
@property
def payment_roll_convention(self):
"""
The method to adjust payment dates when they fall at the end of the month (e.g.
28th of february, 30th, 31st). the possible values are: last, same,
last28, same28. if instrumentcode is defined, the value comes from the
instrument reference data. in case of a user-defined instrument, the default
value is'same'.
:return: enum DateRollingConvention
"""
return self._get_enum_parameter(DateRollingConvention, "paymentRollConvention")
@payment_roll_convention.setter
def payment_roll_convention(self, value):
self._set_enum_parameter(DateRollingConvention, "paymentRollConvention", value)
@property
def stub_rule(self):
"""
The rule that defines whether coupon roll dates are aligned to the maturity or
issue date. the possible values are: issue, maturity, shortfirstprorata,
shortfirstfull, longfirstfull, shortlastprorata. the default value is
'maturity'.
:return: enum StubRule
"""
return self._get_enum_parameter(StubRule, "stubRule")
@stub_rule.setter
def stub_rule(self, value):
self._set_enum_parameter(StubRule, "stubRule", value)
@property
def cms_template(self):
    """
    A reference to a common swap contract that represents the underlying swap in
    case of a constant maturity swap contract (CMS). Example: EUR_AB6E. No default
    value applies.
    :return: str
    """
    return self._get_parameter("cmsTemplate")
@cms_template.setter
def cms_template(self, value):
    self._set_parameter("cmsTemplate", value)
@property
def first_regular_payment_date(self):
    """
    The first regular interest payment date used for the odd first interest period.
    The value is expressed in ISO 8601 format: YYYY-MM-DDT[hh]:[mm]:[ss]Z (e.g.
    2021-01-01T00:00:00Z). No default value applies.
    :return: str
    """
    return self._get_parameter("firstRegularPaymentDate")
@first_regular_payment_date.setter
def first_regular_payment_date(self, value):
    # Normalizes str/date/datetime/timedelta inputs into the wire format.
    self._set_datetime_parameter("firstRegularPaymentDate", value)
@property
def fixed_rate_percent(self):
    """
    The interest rate of the instrument. The value is expressed in percentages.
    Mandatory if no instrumentCode is defined. If instrumentCode is defined, the
    value comes from the instrument reference data.
    :return: float
    """
    return self._get_parameter("fixedRatePercent")
@fixed_rate_percent.setter
def fixed_rate_percent(self, value):
    self._set_parameter("fixedRatePercent", value)
@property
def floor_strike_percent(self):
    """
    The contractual strike rate of the floor. The value is expressed in
    percentages. If this parameter is set, the floor will apply to the leg with the
    same parameters set in the SwapLegDefinition (e.g. maturity, frequency, index,
    discounting rule). No default value applies.
    :return: float
    """
    return self._get_parameter("floorStrikePercent")
@floor_strike_percent.setter
def floor_strike_percent(self, value):
    self._set_parameter("floorStrikePercent", value)
@property
def index_fixing_lag(self):
    """
    The number of working days between the fixing date and the start of the coupon
    period ('InAdvance') or the end of the coupon period ('InArrears'). The
    InAdvance/InArrears mode is set in the indexResetType parameter. The default
    value is the fixing lag associated to the index defined/determined by default
    on the floating instrument.
    :return: int
    """
    return self._get_parameter("indexFixingLag")
@index_fixing_lag.setter
def index_fixing_lag(self, value):
    self._set_parameter("indexFixingLag", value)
@property
def index_fixing_ric(self):
    """
    The RIC that carries the fixing value if the instrument has a floating
    interest. Optional. Mandatory for floating rate instruments if no
    instrumentCode is defined. If instrumentCode is defined, the value comes from
    the instrument reference data. No default value applies.
    :return: str
    """
    return self._get_parameter("indexFixingRic")
@index_fixing_ric.setter
def index_fixing_ric(self, value):
    self._set_parameter("indexFixingRic", value)
@property
def index_name(self):
    """
    The name of the floating rate index (e.g. 'EURIBOR'). No default value applies.
    :return: str
    """
    return self._get_parameter("indexName")
@index_name.setter
def index_name(self, value):
    self._set_parameter("indexName", value)
@property
def index_tenor(self):
    """
    The period code indicating the maturity of the floating rate index. The default
    value is the tenor equivalent to indexResetFrequency or
    interestPaymentFrequency.
    :return: str
    """
    return self._get_parameter("indexTenor")
@index_tenor.setter
def index_tenor(self, value):
    self._set_parameter("indexTenor", value)
@property
def instrument_tag(self):
    """
    A user-defined string to identify the instrument. It can be used to link output
    results to this instrument definition. No default value applies.
    :return: str
    """
    return self._get_parameter("instrumentTag")
@instrument_tag.setter
def instrument_tag(self, value):
    self._set_parameter("instrumentTag", value)
@property
def interest_payment_delay(self):
    """
    The number of working days between the end of the interest accrual period and
    the interest payment date. By default, no delay (0) is applied.
    :return: int
    """
    return self._get_parameter("interestPaymentDelay")
@interest_payment_delay.setter
def interest_payment_delay(self, value):
    self._set_parameter("interestPaymentDelay", value)
@property
def last_regular_payment_date(self):
    """
    The last regular interest payment date used for the odd last interest period.
    The value is expressed in ISO 8601 format: YYYY-MM-DDT[hh]:[mm]:[ss]Z (e.g.
    2021-01-01T00:00:00Z). No default value applies.
    :return: str
    """
    return self._get_parameter("lastRegularPaymentDate")
@last_regular_payment_date.setter
def last_regular_payment_date(self, value):
    # Normalizes str/date/datetime/timedelta inputs into the wire format.
    self._set_datetime_parameter("lastRegularPaymentDate", value)
@property
def leg_tag(self):
    """
    A user-defined string to identify the direction of the leg: 'Paid' or
    'Received'. Optional. No default value applies.
    :return: str
    """
    return self._get_parameter("legTag")
@leg_tag.setter
def leg_tag(self, value):
    self._set_parameter("legTag", value)
@property
def notional_amount(self):
    """
    The notional amount of the instrument's leg. The default value is '1,000,000'.
    :return: float
    """
    return self._get_parameter("notionalAmount")
@notional_amount.setter
def notional_amount(self, value):
    self._set_parameter("notionalAmount", value)
@property
def notional_ccy(self):
    """
    The currency of the instrument's notional amount. The value is expressed in
    ISO 4217 alphabetical format (e.g. 'USD'). No default value applies.
    :return: str
    """
    return self._get_parameter("notionalCcy")
@notional_ccy.setter
def notional_ccy(self, value):
    self._set_parameter("notionalCcy", value)
@property
def payment_business_days(self):
    """
    A list of comma-separated calendar codes to adjust dates (e.g. 'EMU' or 'USA').
    The default value is the calendar associated to the market conventions of the
    interestPaymentCcy for the corresponding leg.
    :return: str
    """
    return self._get_parameter("paymentBusinessDays")
@payment_business_days.setter
def payment_business_days(self, value):
    self._set_parameter("paymentBusinessDays", value)
@property
def spread_bp(self):
    """
    The interest spread in basis points that is added to the floating rate index
    value. Optional. If instrumentCode is defined, the value comes from the
    instrument reference data. In case of a user-defined instrument, the default
    value is '0'.
    :return: float
    """
    return self._get_parameter("spreadBp")
@spread_bp.setter
def spread_bp(self, value):
    self._set_parameter("spreadBp", value)
@property
def upfront_amount(self):
    """
    The amount which represents the net present value of the swap. It is computed
    as [(100 - dirtyPricePercent) / 100 x notionalAmount] (formula reconstructed
    from a garbled original -- TODO confirm against the IPA documentation). The
    value is expressed in upfrontAmountCcy. By default, no payment (0) applies.
    :return: float
    """
    return self._get_parameter("upfrontAmount")
@upfront_amount.setter
def upfront_amount(self, value):
    self._set_parameter("upfrontAmount", value)
@property
def index_price_side(self):
    """
    The price side used for the floating rate index (NOTE(review): description
    inferred from the parameter name -- confirm against the IPA documentation).
    :return: enum PriceSide
    """
    return self._get_enum_parameter(PriceSide, "indexPriceSide")
@index_price_side.setter
def index_price_side(self, value):
    self._set_enum_parameter(PriceSide, "indexPriceSide", value)
@property
def fixed_rate_percent_schedule(self):
    """
    A schedule of fixed rates used instead of a single fixedRatePercent
    (NOTE(review): schema inferred from the parameter name -- confirm against the
    IPA documentation).
    :return: dict
    """
    return self._get_parameter("fixedRatePercentSchedule")
@fixed_rate_percent_schedule.setter
def fixed_rate_percent_schedule(self, value):
    self._set_parameter("fixedRatePercentSchedule", value)
from typing import Optional, Union
from ....._types import OptDateTime
from ..._enums import (
IndexConvexityAdjustmentIntegrationMethod,
IndexConvexityAdjustmentMethod,
PriceSide,
TenorReferenceDate,
)
from ..._object_definition import ObjectDefinition
class PricingParameters(ObjectDefinition):
    """
    Pricing parameters for the Financial Contract analytics API endpoint,
    which returns calculations relevant to each contract type.
    Parameters
    ----------
    index_convexity_adjustment_integration_method : IndexConvexityAdjustmentIntegrationMethod or str, optional
        The integration method used for the static replication method. The possible
        values are: RiemannSum, RungeKutta. The default value is 'RiemannSum'.
    index_convexity_adjustment_method : IndexConvexityAdjustmentMethod or str, optional
        The convexity adjustment type for constant maturity swaps (CMS) and Libor
        in-arrears swaps. The possible values are: None, BlackScholes,
        LinearSwapModel, Replication. The default value is 'BlackScholes'.
    price_side : PriceSide or str, optional
        The quoted price side of the instrument. Optional. The default value is 'Mid'.
    tenor_reference_date : TenorReferenceDate or str, optional
        In case the swap definition uses 'startTenor', 'startTenor' defines whether
        the swap start date is calculated from the valuation date or from the spot
        date.
    discounting_ccy : str, optional
        The currency code, which defines the choice of the discounting curve. The
        value is expressed in ISO 4217 alphabetical format (e.g. 'USD'). By default,
        settlementCcy or the paid leg currency is used.
    discounting_tenor : str, optional
    market_data_date : str or date or datetime or timedelta, optional
        The date at which the market data is retrieved. The value is expressed in
        ISO 8601 format: YYYY-MM-DDT[hh]:[mm]:[ss]Z (e.g. '2021-01-01T00:00:00Z').
        It should be less or equal to valuationDate. Optional. By default,
        marketDataDate is valuationDate or today.
    market_value_in_deal_ccy : float, optional
        The dirty market value of the instrument computed as
        [cleanMarketValueInDealCcy + accruedAmountInDealCcy]. The value is expressed
        in the deal currency. The default value is '0'.
    report_ccy : str, optional
        The reporting currency code, expressed in ISO 4217 alphabetical format
        (e.g. 'USD'). It is set for the fields ending with 'XxxInReportCcy'.
        Optional. The default value is the notional currency.
    use_legs_signing : bool, optional
        Enables the signing of "risk measures" and "valuation" outputs based on the
        leg's direction. The default value is False.
    valuation_date : str or date or datetime or timedelta, optional
        The date at which the instrument is valued. The value is expressed in ISO
        8601 format: YYYY-MM-DDT[hh]:[mm]:[ss]Z (e.g. '2021-01-01T00:00:00Z'). By
        default, marketDataDate is used. If marketDataDate is not specified, the
        default value is today.
    """
    def __init__(
        self,
        index_convexity_adjustment_integration_method: Union[IndexConvexityAdjustmentIntegrationMethod, str] = None,
        index_convexity_adjustment_method: Union[IndexConvexityAdjustmentMethod, str] = None,
        price_side: Union[PriceSide, str] = None,
        tenor_reference_date: Union[TenorReferenceDate, str] = None,
        discounting_ccy: Optional[str] = None,
        discounting_tenor: Optional[str] = None,
        market_data_date: "OptDateTime" = None,
        market_value_in_deal_ccy: Optional[float] = None,
        report_ccy: Optional[str] = None,
        use_legs_signing: Optional[bool] = None,
        valuation_date: "OptDateTime" = None,
    ) -> None:
        super().__init__()
        self.index_convexity_adjustment_integration_method = index_convexity_adjustment_integration_method
        self.index_convexity_adjustment_method = index_convexity_adjustment_method
        self.price_side = price_side
        self.tenor_reference_date = tenor_reference_date
        self.discounting_ccy = discounting_ccy
        self.discounting_tenor = discounting_tenor
        self.market_data_date = market_data_date
        self.market_value_in_deal_ccy = market_value_in_deal_ccy
        self.report_ccy = report_ccy
        self.use_legs_signing = use_legs_signing
        self.valuation_date = valuation_date
    @property
    def index_convexity_adjustment_integration_method(self):
        """
        The integration method used for the static replication method. The possible
        values are: RiemannSum, RungeKutta. The default value is 'RiemannSum'.
        :return: enum IndexConvexityAdjustmentIntegrationMethod
        """
        return self._get_enum_parameter(
            IndexConvexityAdjustmentIntegrationMethod,
            "indexConvexityAdjustmentIntegrationMethod",
        )
    @index_convexity_adjustment_integration_method.setter
    def index_convexity_adjustment_integration_method(self, value):
        self._set_enum_parameter(
            IndexConvexityAdjustmentIntegrationMethod,
            "indexConvexityAdjustmentIntegrationMethod",
            value,
        )
    @property
    def index_convexity_adjustment_method(self):
        """
        The convexity adjustment type for constant maturity swaps (CMS) and Libor
        in-arrears swaps. The possible values are: None, BlackScholes,
        LinearSwapModel, Replication. The default value is 'BlackScholes'.
        :return: enum IndexConvexityAdjustmentMethod
        """
        return self._get_enum_parameter(IndexConvexityAdjustmentMethod, "indexConvexityAdjustmentMethod")
    @index_convexity_adjustment_method.setter
    def index_convexity_adjustment_method(self, value):
        self._set_enum_parameter(IndexConvexityAdjustmentMethod, "indexConvexityAdjustmentMethod", value)
    @property
    def price_side(self):
        """
        The quoted price side of the instrument. Optional. The default value is
        'Mid'.
        :return: enum PriceSide
        """
        return self._get_enum_parameter(PriceSide, "priceSide")
    @price_side.setter
    def price_side(self, value):
        self._set_enum_parameter(PriceSide, "priceSide", value)
    @property
    def tenor_reference_date(self):
        """
        In case the swap definition uses 'startTenor', 'startTenor' defines whether
        the swap start date is calculated from the valuation date or from the spot
        date.
        :return: enum TenorReferenceDate
        """
        return self._get_enum_parameter(TenorReferenceDate, "tenorReferenceDate")
    @tenor_reference_date.setter
    def tenor_reference_date(self, value):
        self._set_enum_parameter(TenorReferenceDate, "tenorReferenceDate", value)
    @property
    def discounting_ccy(self):
        """
        The currency code, which defines the choice of the discounting curve. The
        value is expressed in ISO 4217 alphabetical format (e.g. 'USD'). By default,
        settlementCcy or the paid leg currency is used.
        :return: str
        """
        return self._get_parameter("discountingCcy")
    @discounting_ccy.setter
    def discounting_ccy(self, value):
        self._set_parameter("discountingCcy", value)
    @property
    def discounting_tenor(self):
        """
        The tenor used to select the discounting curve (NOTE(review): description
        inferred from the parameter name -- confirm against the IPA documentation).
        :return: str
        """
        return self._get_parameter("discountingTenor")
    @discounting_tenor.setter
    def discounting_tenor(self, value):
        self._set_parameter("discountingTenor", value)
    @property
    def market_data_date(self):
        """
        The date at which the market data is retrieved. The value is expressed in
        ISO 8601 format: YYYY-MM-DDT[hh]:[mm]:[ss]Z (e.g. '2021-01-01T00:00:00Z').
        It should be less or equal to valuationDate. Optional. By default,
        marketDataDate is valuationDate or today.
        :return: str
        """
        return self._get_parameter("marketDataDate")
    @market_data_date.setter
    def market_data_date(self, value):
        # Normalizes str/date/datetime/timedelta inputs into the wire format.
        self._set_datetime_parameter("marketDataDate", value)
    @property
    def market_value_in_deal_ccy(self):
        """
        The dirty market value of the instrument computed as
        [cleanMarketValueInDealCcy + accruedAmountInDealCcy]. The value is expressed
        in the deal currency. The default value is '0'.
        :return: float
        """
        return self._get_parameter("marketValueInDealCcy")
    @market_value_in_deal_ccy.setter
    def market_value_in_deal_ccy(self, value):
        self._set_parameter("marketValueInDealCcy", value)
    @property
    def report_ccy(self):
        """
        The reporting currency code, expressed in ISO 4217 alphabetical format
        (e.g. 'USD'). It is set for the fields ending with 'XxxInReportCcy'.
        Optional. The default value is the notional currency.
        :return: str
        """
        return self._get_parameter("reportCcy")
    @report_ccy.setter
    def report_ccy(self, value):
        self._set_parameter("reportCcy", value)
    @property
    def use_legs_signing(self):
        """
        Enables the signing of "risk measures" and "valuation" outputs based on the
        leg's direction. The default value is False.
        :return: bool
        """
        return self._get_parameter("useLegsSigning")
    @use_legs_signing.setter
    def use_legs_signing(self, value):
        self._set_parameter("useLegsSigning", value)
    @property
    def valuation_date(self):
        """
        The date at which the instrument is valued. The value is expressed in ISO
        8601 format: YYYY-MM-DDT[hh]:[mm]:[ss]Z (e.g. '2021-01-01T00:00:00Z'). By
        default, marketDataDate is used. If marketDataDate is not specified, the
        default value is today.
        :return: str
        """
        return self._get_parameter("valuationDate")
    @valuation_date.setter
    def valuation_date(self, value):
        # Normalizes str/date/datetime/timedelta inputs into the wire format.
        self._set_datetime_parameter("valuationDate", value)
from typing import List, Optional, Union
from ....._types import OptDateTime
from .._instrument_definition import InstrumentDefinition
from ..._enums import (
    AdjustInterestToPaymentDate,
    BusinessDayConvention,
    BuySell,
    DateRollingConvention,
    DayCountBasis,
    Frequency,
    IndexResetType,
    InterestCalculationConvention,
    PriceSide,
    StubRule,
)
from ..._models import (
    AmortizationItem,
    BarrierDefinitionElement,
    InputFlow,
)
class CapFloorInstrumentDefinition(InstrumentDefinition):
def __init__(
    self,
    instrument_tag: Optional[str] = None,
    start_date: "OptDateTime" = None,
    end_date: "OptDateTime" = None,
    tenor: Optional[str] = None,
    notional_ccy: Optional[str] = None,
    notional_amount: Optional[float] = None,
    index_name: Optional[str] = None,
    index_tenor: Optional[str] = None,
    interest_payment_frequency: Union[Frequency, str] = None,
    interest_calculation_method: Union[DayCountBasis, str] = None,
    payment_business_day_convention: Union[BusinessDayConvention, str] = None,
    payment_roll_convention: Union[DateRollingConvention, str] = None,
    index_reset_frequency: Union[Frequency, str] = None,
    index_reset_type: Union[IndexResetType, str] = None,
    index_fixing_lag: Optional[int] = None,
    # Annotation fixed: this is a list parameter (see the amortization_schedule
    # property, which uses _set_list_parameter and documents `list AmortizationItem`).
    amortization_schedule: Optional[List[AmortizationItem]] = None,
    payment_business_days: Optional[str] = None,
    adjust_interest_to_payment_date: Union[AdjustInterestToPaymentDate, str] = None,
    stub_rule: Union[StubRule, str] = None,
    barrier_definition: Optional[BarrierDefinitionElement] = None,
    buy_sell: Union[BuySell, str] = None,
    interest_calculation_convention: Union[InterestCalculationConvention, str] = None,
    # Annotation fixed: this is a list parameter (see the payments property,
    # which uses _set_list_parameter and documents `list InputFlow`).
    payments: Optional[List[InputFlow]] = None,
    annualized_rebate: Optional[bool] = None,
    cap_digital_payout_percent: Optional[float] = None,
    cap_strike_percent: Optional[float] = None,
    cms_template: Optional[str] = None,
    floor_digital_payout_percent: Optional[float] = None,
    floor_strike_percent: Optional[float] = None,
    index_fixing_ric: Optional[str] = None,
    is_backward_looking_index: Optional[bool] = None,
    is_rfr: Optional[bool] = None,
    is_term_rate: Optional[bool] = None,
    index_price_side: Union[PriceSide, str] = None,
    cap_strike_percent_schedule: Optional[dict] = None,
    floor_strike_percent_schedule: Optional[dict] = None,
) -> None:
    """
    Build a cap/floor instrument definition for the Financial Contracts
    analytics endpoint.

    Every argument is assigned to the attribute of the same name; where this
    class declares a matching property, the assignment goes through its setter,
    which serializes the value under the corresponding camelCase request key.
    See the individual properties for the accepted values and defaults.
    Enum-typed arguments also accept the enum member's string name.
    """
    super().__init__()
    self.instrument_tag = instrument_tag
    self.start_date = start_date
    self.end_date = end_date
    self.tenor = tenor
    self.notional_ccy = notional_ccy
    self.notional_amount = notional_amount
    self.index_name = index_name
    self.index_tenor = index_tenor
    self.interest_payment_frequency = interest_payment_frequency
    self.interest_calculation_method = interest_calculation_method
    self.payment_business_day_convention = payment_business_day_convention
    self.payment_roll_convention = payment_roll_convention
    self.index_reset_frequency = index_reset_frequency
    self.index_reset_type = index_reset_type
    self.index_fixing_lag = index_fixing_lag
    self.amortization_schedule = amortization_schedule
    self.payment_business_days = payment_business_days
    self.adjust_interest_to_payment_date = adjust_interest_to_payment_date
    self.stub_rule = stub_rule
    self.barrier_definition = barrier_definition
    self.buy_sell = buy_sell
    self.interest_calculation_convention = interest_calculation_convention
    self.payments = payments
    self.annualized_rebate = annualized_rebate
    self.cap_digital_payout_percent = cap_digital_payout_percent
    self.cap_strike_percent = cap_strike_percent
    self.cms_template = cms_template
    self.floor_digital_payout_percent = floor_digital_payout_percent
    self.floor_strike_percent = floor_strike_percent
    self.index_fixing_ric = index_fixing_ric
    self.is_backward_looking_index = is_backward_looking_index
    self.is_rfr = is_rfr
    self.is_term_rate = is_term_rate
    self.index_price_side = index_price_side
    self.cap_strike_percent_schedule = cap_strike_percent_schedule
    self.floor_strike_percent_schedule = floor_strike_percent_schedule
def get_instrument_type(self):
    """Return the analytics instrument-type code for this definition."""
    contract_type = "CapFloor"
    return contract_type
@property
def adjust_interest_to_payment_date(self):
    """
    An indication if the coupon dates are adjusted to the payment dates. The
    possible values are: Adjusted, Unadjusted. Optional. The default value is
    'Unadjusted'.
    :return: enum AdjustInterestToPaymentDate
    """
    return self._get_enum_parameter(AdjustInterestToPaymentDate, "adjustInterestToPaymentDate")
@adjust_interest_to_payment_date.setter
def adjust_interest_to_payment_date(self, value):
    self._set_enum_parameter(AdjustInterestToPaymentDate, "adjustInterestToPaymentDate", value)
@property
def amortization_schedule(self):
    """
    The amortization schedule of the instrument. It contains the following
    information: startDate, endDate, remainingNotional, amortizationFrequency,
    amount, amortizationType. No default value applies.
    :return: list AmortizationItem
    """
    return self._get_list_parameter(AmortizationItem, "amortizationSchedule")
@amortization_schedule.setter
def amortization_schedule(self, value):
    self._set_list_parameter(AmortizationItem, "amortizationSchedule", value)
@property
def barrier_definition(self):
    """
    The barrier definition of the instrument (see BarrierDefinitionElement).
    :return: object BarrierDefinitionElement
    """
    return self._get_object_parameter(BarrierDefinitionElement, "barrierDefinition")
@barrier_definition.setter
def barrier_definition(self, value):
    self._set_object_parameter(BarrierDefinitionElement, "barrierDefinition", value)
@property
def buy_sell(self):
    """
    The indicator of the deal side. The possible values are: Buy (buying the
    option), Sell (selling/writing the option). Mandatory. No default value
    applies.
    :return: enum BuySell
    """
    return self._get_enum_parameter(BuySell, "buySell")
@buy_sell.setter
def buy_sell(self, value):
    self._set_enum_parameter(BuySell, "buySell", value)
@property
def index_reset_frequency(self):
    """
    The reset frequency for the floating instrument. Optional. By default, the
    reset frequency associated to the index defined for the floating leg is used.
    :return: enum Frequency
    """
    return self._get_enum_parameter(Frequency, "indexResetFrequency")
@index_reset_frequency.setter
def index_reset_frequency(self, value):
    self._set_enum_parameter(Frequency, "indexResetFrequency", value)
@property
def index_reset_type(self):
    """
    A flag that indicates if the floating rate index is reset before the coupon
    period starts or at the end of the coupon period. The possible values are:
    InAdvance (resets the index before the start of the interest period),
    InArrears (resets the index at the end of the interest period). Optional. The
    default value is 'InAdvance'.
    :return: enum IndexResetType
    """
    return self._get_enum_parameter(IndexResetType, "indexResetType")
@index_reset_type.setter
def index_reset_type(self, value):
    self._set_enum_parameter(IndexResetType, "indexResetType", value)
@property
def interest_calculation_convention(self):
    """
    The day count basis method convention used to calculate the interest payments.
    Optional. Defaults to MoneyMarket.
    :return: enum InterestCalculationConvention
    """
    return self._get_enum_parameter(InterestCalculationConvention, "interestCalculationConvention")
@interest_calculation_convention.setter
def interest_calculation_convention(self, value):
    self._set_enum_parameter(InterestCalculationConvention, "interestCalculationConvention", value)
@property
def interest_calculation_method(self):
    """
    The day count basis method used to calculate the interest payments (e.g.
    Dcb_30_360, Dcb_30_Actual). The default value is selected based on notionalCcy.
    (The original text described the payment frequency; corrected to match the
    DayCountBasis enum this property actually serializes.)
    :return: enum DayCountBasis
    """
    return self._get_enum_parameter(DayCountBasis, "interestCalculationMethod")
@interest_calculation_method.setter
def interest_calculation_method(self, value):
    self._set_enum_parameter(DayCountBasis, "interestCalculationMethod", value)
@property
def interest_payment_frequency(self):
    """
    The interest payment frequency. By default, indexTenor is used, if it is
    defined. Otherwise, the default value is indexResetFrequency.
    :return: enum Frequency
    """
    return self._get_enum_parameter(Frequency, "interestPaymentFrequency")
@interest_payment_frequency.setter
def interest_payment_frequency(self, value):
    self._set_enum_parameter(Frequency, "interestPaymentFrequency", value)
@property
def payment_business_day_convention(self):
    """
    The method to adjust dates to working days. The possible values are:
    PreviousBusinessDay, NextBusinessDay, ModifiedFollowing, NoMoving,
    BbswModifiedFollowing. Optional. If instrumentCode is defined, the value comes
    from the instrument reference data. In case of a user-defined instrument, the
    default value is 'ModifiedFollowing'.
    :return: enum BusinessDayConvention
    """
    return self._get_enum_parameter(BusinessDayConvention, "paymentBusinessDayConvention")
@payment_business_day_convention.setter
def payment_business_day_convention(self, value):
    self._set_enum_parameter(BusinessDayConvention, "paymentBusinessDayConvention", value)
@property
def payment_roll_convention(self):
    """
    The method to adjust payment dates when they fall at the end of the month
    (e.g. 28th of February, 30th, 31st). The possible values are: Last, Same,
    Last28, Same28. Optional. If instrumentCode is defined, the value comes from
    the instrument reference data. In case of a user-defined instrument, the
    default value is 'Last'.
    :return: enum DateRollingConvention
    """
    return self._get_enum_parameter(DateRollingConvention, "paymentRollConvention")
@payment_roll_convention.setter
def payment_roll_convention(self, value):
    self._set_enum_parameter(DateRollingConvention, "paymentRollConvention", value)
@property
def payments(self):
    """
    An array of historical payments.
    :return: list InputFlow
    """
    return self._get_list_parameter(InputFlow, "payments")
@payments.setter
def payments(self, value):
    self._set_list_parameter(InputFlow, "payments", value)
@property
def stub_rule(self):
    """
    The rule that defines whether coupon roll dates are aligned to the maturity or
    issue date. The possible values are: Issue, Maturity, ShortFirstProRata,
    ShortFirstFull, LongFirstFull, ShortLastProRata. Optional. The default value
    is 'Maturity'.
    :return: enum StubRule
    """
    return self._get_enum_parameter(StubRule, "stubRule")
@stub_rule.setter
def stub_rule(self, value):
    self._set_enum_parameter(StubRule, "stubRule", value)
@property
def annualized_rebate(self):
    """
    An indicator if the rebate is adjusted according to the frequency of the cap.
    For example, if the frequency of the cap is quarterly (frq:4), all rebates are
    divided by four (approximately). The possible values are: True (the rebates
    are adjusted), False (there is no rebate adjustment; the rebate value is
    assumed to be annualized). The default value is 'False'.
    :return: bool
    """
    return self._get_parameter("annualizedRebate")
@annualized_rebate.setter
def annualized_rebate(self, value):
    self._set_parameter("annualizedRebate", value)
@property
def cap_digital_payout_percent(self):
    """
    A percentage of notionalAmount that is received (paid) by the option buyer
    (seller) if the option expires on or above the cap strike. No default value
    applies.
    :return: float
    """
    return self._get_parameter("capDigitalPayoutPercent")
@cap_digital_payout_percent.setter
def cap_digital_payout_percent(self, value):
    self._set_parameter("capDigitalPayoutPercent", value)
@property
def cap_strike_percent(self):
    """
    The contractual strike rate of the cap. The value is expressed in percentages.
    Either capStrikePercent or capStrikePercentSchedule must be provided. No
    default value applies.
    :return: float
    """
    return self._get_parameter("capStrikePercent")
@cap_strike_percent.setter
def cap_strike_percent(self, value):
    self._set_parameter("capStrikePercent", value)
@property
def cms_template(self):
    """
    A reference to a common swap contract that represents the underlying swap in
    case of a constant maturity swap contract (CMS). Example: EUR_AB6E. No default
    value applies.
    :return: str
    """
    return self._get_parameter("cmsTemplate")
@cms_template.setter
def cms_template(self, value):
    self._set_parameter("cmsTemplate", value)
@property
def end_date(self):
    """
    The maturity or expiry date of the instrument. The value is expressed in ISO
    8601 format: YYYY-MM-DDT[hh]:[mm]:[ss]Z (e.g. 2021-01-01T00:00:00Z). Either
    tenor or endDate must be provided. The default value is valuationDate shifted
    forward by tenor.
    :return: str
    """
    return self._get_parameter("endDate")
@end_date.setter
def end_date(self, value):
    # Normalizes str/date/datetime/timedelta inputs into the wire format.
    self._set_datetime_parameter("endDate", value)
@property
def floor_digital_payout_percent(self):
"""
A percentage of notionalamount that is received (paid) by the option buyer
(seller) if the option expires on or below the floor strike. no default value
applies.
:return: float
"""
return self._get_parameter("floorDigitalPayoutPercent")
@floor_digital_payout_percent.setter
def floor_digital_payout_percent(self, value):
self._set_parameter("floorDigitalPayoutPercent", value)
@property
def floor_strike_percent(self):
"""
The contractual strike rate of the floor. the value is expressed in percentages.
either floorstrikepercent or floorstrikepercentschedule must be provided. no
default value applies.
:return: float
"""
return self._get_parameter("floorStrikePercent")
@floor_strike_percent.setter
def floor_strike_percent(self, value):
self._set_parameter("floorStrikePercent", value)
@property
def index_fixing_lag(self):
    """
    The number of working days between the fixing date of the index and the start of
    the coupon period ('inadvance') or the end of the coupon period ('inarrears').
    optional. if indexfixingric or indexname is defined, the associated fixing lag
    is selected. otherwise, the value is based on the first fixing that matches
    currency and indextenor.
    :return: int
    """
    return self._get_parameter("indexFixingLag")

@index_fixing_lag.setter
def index_fixing_lag(self, value):
    # Write-through to the backend "indexFixingLag" request parameter.
    self._set_parameter("indexFixingLag", value)
@property
def index_fixing_ric(self):
    """
    The ric that carries the fixing value if the instrument has a floating interest.
    optional. no default value applies.
    :return: str
    """
    return self._get_parameter("indexFixingRic")

@index_fixing_ric.setter
def index_fixing_ric(self, value):
    # Write-through to the backend "indexFixingRic" request parameter.
    self._set_parameter("indexFixingRic", value)
@property
def index_name(self):
    """
    The name of the floating rate index (e.g. 'euribor'). optional. no default value
    applies.
    :return: str
    """
    return self._get_parameter("indexName")

@index_name.setter
def index_name(self, value):
    # Write-through to the backend "indexName" request parameter.
    self._set_parameter("indexName", value)
@property
def index_tenor(self):
    """
    The period code indicating the maturity of the floating rate index. if
    indexfixingric is defined, the values comes from the instrument reference data.
    otherwise, the default value is the tenor equivalent toindexresetfrequency or
    interestpaymentfrequency.
    :return: str
    """
    return self._get_parameter("indexTenor")

@index_tenor.setter
def index_tenor(self, value):
    # Write-through to the backend "indexTenor" request parameter.
    self._set_parameter("indexTenor", value)
@property
def instrument_tag(self):
    """
    A user defined string to identify the instrument. it can be used to link output
    results to the instrument definition.limited to 40 characters. only alphabetic,
    numeric and '- _.#=@' characters are supported. optional. no default value
    applies.
    :return: str
    """
    return self._get_parameter("instrumentTag")

@instrument_tag.setter
def instrument_tag(self, value):
    # Write-through to the backend "instrumentTag" request parameter.
    self._set_parameter("instrumentTag", value)
@property
def is_backward_looking_index(self):
    """
    An indicator if the underlying fixing rate is backward-looking (mostly
    risk-free-rate). the possible values are: false: the underlying fixing rate is
    forward-looking and the rate of each caplet is fixed at the start of the caplet
    period. true: the underlying fixing rate is backward-looking. for backward
    looking caps/floors the rate is accreted up to the end of the caplet period.
    :return: bool
    """
    return self._get_parameter("isBackwardLookingIndex")

@is_backward_looking_index.setter
def is_backward_looking_index(self, value):
    # Write-through to the backend "isBackwardLookingIndex" request parameter.
    self._set_parameter("isBackwardLookingIndex", value)
@property
def is_rfr(self):
    """
    :return: bool
    """
    # NOTE(review): undocumented upstream; the name suggests a flag marking a
    # risk-free-rate (RFR) index — confirm semantics against the IPA API docs.
    return self._get_parameter("isRfr")

@is_rfr.setter
def is_rfr(self, value):
    # Write-through to the backend "isRfr" request parameter.
    self._set_parameter("isRfr", value)
@property
def is_term_rate(self):
    """
    :return: bool
    """
    # NOTE(review): undocumented upstream; the name suggests a flag marking a
    # term-rate index — confirm semantics against the IPA API docs.
    return self._get_parameter("isTermRate")

@is_term_rate.setter
def is_term_rate(self, value):
    # Write-through to the backend "isTermRate" request parameter.
    self._set_parameter("isTermRate", value)
@property
def notional_amount(self):
    """
    The notional amount of the instrument. optional. the default value is
    '1,000,000' in notionalccy.
    :return: float
    """
    return self._get_parameter("notionalAmount")

@notional_amount.setter
def notional_amount(self, value):
    # Write-through to the backend "notionalAmount" request parameter.
    self._set_parameter("notionalAmount", value)
@property
def notional_ccy(self):
    """
    The currency of the instrument's notional amount. the value is expressed in iso
    4217 alphabetical format (e.g. 'usd'). no default value applies. mandatory if
    instrument code or instrument style has not been defined. in case an instrument
    code/style has been defined, value may comes from the reference data.
    :return: str
    """
    return self._get_parameter("notionalCcy")

@notional_ccy.setter
def notional_ccy(self, value):
    # Write-through to the backend "notionalCcy" request parameter.
    self._set_parameter("notionalCcy", value)
@property
def payment_business_days(self):
    """
    A list of comma-separated calendar codes to adjust dates (e.g. 'emu' or 'usa').
    no default value applies.
    :return: str
    """
    return self._get_parameter("paymentBusinessDays")

@payment_business_days.setter
def payment_business_days(self, value):
    # Write-through to the backend "paymentBusinessDays" request parameter.
    self._set_parameter("paymentBusinessDays", value)
@property
def start_date(self):
    """
    The start date of the instrument. this value is expressed in iso 8601 format:
    yyyy-mm-ddt[hh]:[mm]:[ss]z (e.g. 2021-01-01t00:00:00z). the default value is
    valuationdate shifted forward by a month using market conventions.
    :return: str
    """
    return self._get_parameter("startDate")

@start_date.setter
def start_date(self, value):
    # Date-like input is normalized by _set_datetime_parameter before storing.
    self._set_datetime_parameter("startDate", value)
@property
def tenor(self):
    """
    The code indicating the tenor of the instrument (e.g. '5m'). mandatory if
    enddate is not provided. no default value applies.
    :return: str
    """
    return self._get_parameter("tenor")

@tenor.setter
def tenor(self, value):
    # Write-through to the backend "tenor" request parameter.
    self._set_parameter("tenor", value)
@property
def index_price_side(self):
    """
    The quoted price side (bid/ask/mid) selected for the underlying index.

    :return: enum PriceSide
    """
    return self._get_enum_parameter(PriceSide, "indexPriceSide")

@index_price_side.setter
def index_price_side(self, value):
    # Value is validated/coerced against the PriceSide enum before storing.
    self._set_enum_parameter(PriceSide, "indexPriceSide", value)
@property
def cap_strike_percent_schedule(self):
    """
    The schedule of dates and cap strike rates, expressed in percentages.

    :return: dict
    """
    return self._get_parameter("capStrikePercentSchedule")

@cap_strike_percent_schedule.setter
def cap_strike_percent_schedule(self, value):
    # Write-through to the backend "capStrikePercentSchedule" request parameter.
    self._set_parameter("capStrikePercentSchedule", value)
@property
def floor_strike_percent_schedule(self):
    """
    The schedule of dates and floor strike rates, expressed in percentages.

    :return: dict
    """
    return self._get_parameter("floorStrikePercentSchedule")

@floor_strike_percent_schedule.setter
def floor_strike_percent_schedule(self, value):
    # Write-through to the backend "floorStrikePercentSchedule" request parameter.
    # (Dataset-extraction residue that was fused onto this line has been removed.)
    self._set_parameter("floorStrikePercentSchedule", value)
from typing import Optional, Union
from ....._types import OptDateTime
from ..._enums import (
IndexConvexityAdjustmentIntegrationMethod,
IndexConvexityAdjustmentMethod,
PriceSide,
)
from ..._object_definition import ObjectDefinition
class PricingParameters(ObjectDefinition):
    """
    Pricing parameters for the Cap/Floor financial-contract analytics endpoint.

    Parameters
    ----------
    index_convexity_adjustment_integration_method : IndexConvexityAdjustmentIntegrationMethod, optional
    index_convexity_adjustment_method : IndexConvexityAdjustmentMethod, optional
    price_side : PriceSide or str, optional
        The quoted price side of the instrument. Optional. Default value is 'mid'.
    market_data_date : str or date or datetime or timedelta, optional
        The market data date for pricing. Optional. By default, the marketDataDate date
        is the ValuationDate or Today.
    market_value_in_deal_ccy : float, optional
        MarketValueInDealCcy to override and that will be used as pricing analysis input
        to compute VolatilityPercent. Optional. No override is applied by default. Note
        that Premium takes priority over Volatility input.
    report_ccy : str, optional
        Valuation is performed in deal currency. If a report currency is set, valuation
        is done in that report currency.
    skip_first_cap_floorlet : bool, optional
        Indicates whether to take into consideration the first caplet.
    valuation_date : str or date or datetime or timedelta, optional
        The valuation date for pricing. Optional. If not set the valuation date is
        equal to MarketDataDate or Today. For assets that contain a
        settlementConvention, the default valuation date is equal to the settlement
        date of the asset that is usually the TradeDate+SettlementConvention.
    implied_volatility_bp : float, optional
        User defined implied normal volatility, expressed in basis points.
    implied_volatility_percent : float, optional
        User defined implied lognormal volatility, expressed in percent.
    """

    def __init__(
        self,
        index_convexity_adjustment_integration_method: Optional[IndexConvexityAdjustmentIntegrationMethod] = None,
        index_convexity_adjustment_method: Optional[IndexConvexityAdjustmentMethod] = None,
        # PEP 484: Optional is spelled out explicitly (implicit Optional via
        # `Union[...] = None` is deprecated).
        price_side: Optional[Union[PriceSide, str]] = None,
        market_data_date: "OptDateTime" = None,
        market_value_in_deal_ccy: Optional[float] = None,
        report_ccy: Optional[str] = None,
        skip_first_cap_floorlet: Optional[bool] = None,
        valuation_date: "OptDateTime" = None,
        implied_volatility_bp: Optional[float] = None,
        implied_volatility_percent: Optional[float] = None,
    ) -> None:
        super().__init__()
        # Each assignment routes through the matching property setter below, which
        # serializes the value into the request-parameter dictionary.
        self.index_convexity_adjustment_integration_method = index_convexity_adjustment_integration_method
        self.index_convexity_adjustment_method = index_convexity_adjustment_method
        self.price_side = price_side
        self.market_data_date = market_data_date
        self.market_value_in_deal_ccy = market_value_in_deal_ccy
        self.report_ccy = report_ccy
        self.skip_first_cap_floorlet = skip_first_cap_floorlet
        self.valuation_date = valuation_date
        self.implied_volatility_bp = implied_volatility_bp
        self.implied_volatility_percent = implied_volatility_percent

    @property
    def index_convexity_adjustment_integration_method(self):
        """
        :return: enum IndexConvexityAdjustmentIntegrationMethod
        """
        return self._get_enum_parameter(
            IndexConvexityAdjustmentIntegrationMethod,
            "indexConvexityAdjustmentIntegrationMethod",
        )

    @index_convexity_adjustment_integration_method.setter
    def index_convexity_adjustment_integration_method(self, value):
        self._set_enum_parameter(
            IndexConvexityAdjustmentIntegrationMethod,
            "indexConvexityAdjustmentIntegrationMethod",
            value,
        )

    @property
    def index_convexity_adjustment_method(self):
        """
        :return: enum IndexConvexityAdjustmentMethod
        """
        return self._get_enum_parameter(IndexConvexityAdjustmentMethod, "indexConvexityAdjustmentMethod")

    @index_convexity_adjustment_method.setter
    def index_convexity_adjustment_method(self, value):
        self._set_enum_parameter(IndexConvexityAdjustmentMethod, "indexConvexityAdjustmentMethod", value)

    @property
    def price_side(self):
        """
        The quoted price side of the instrument. Optional. Default value is 'mid'.
        :return: enum PriceSide
        """
        return self._get_enum_parameter(PriceSide, "priceSide")

    @price_side.setter
    def price_side(self, value):
        self._set_enum_parameter(PriceSide, "priceSide", value)

    @property
    def market_data_date(self):
        """
        The market data date for pricing. Optional. By default, the marketDataDate date
        is the ValuationDate or Today.
        :return: str
        """
        return self._get_parameter("marketDataDate")

    @market_data_date.setter
    def market_data_date(self, value):
        # Date-like input is normalized by _set_datetime_parameter before storing.
        self._set_datetime_parameter("marketDataDate", value)

    @property
    def market_value_in_deal_ccy(self):
        """
        MarketValueInDealCcy to override and that will be used as pricing analysis input
        to compute VolatilityPercent. Optional. No override is applied by default. Note
        that Premium takes priority over Volatility input.
        :return: float
        """
        return self._get_parameter("marketValueInDealCcy")

    @market_value_in_deal_ccy.setter
    def market_value_in_deal_ccy(self, value):
        self._set_parameter("marketValueInDealCcy", value)

    @property
    def report_ccy(self):
        """
        Valuation is performed in deal currency. If a report currency is set, valuation
        is done in that report currency.
        :return: str
        """
        return self._get_parameter("reportCcy")

    @report_ccy.setter
    def report_ccy(self, value):
        self._set_parameter("reportCcy", value)

    @property
    def skip_first_cap_floorlet(self):
        """
        Indicates whether to take into consideration the first caplet.
        :return: bool
        """
        return self._get_parameter("skipFirstCapFloorlet")

    @skip_first_cap_floorlet.setter
    def skip_first_cap_floorlet(self, value):
        self._set_parameter("skipFirstCapFloorlet", value)

    @property
    def valuation_date(self):
        """
        The valuation date for pricing. Optional. If not set the valuation date is
        equal to MarketDataDate or Today. For assets that contain a
        settlementConvention, the default valuation date is equal to the settlement
        date of the asset that is usually the TradeDate+SettlementConvention.
        :return: str
        """
        return self._get_parameter("valuationDate")

    @valuation_date.setter
    def valuation_date(self, value):
        # Date-like input is normalized by _set_datetime_parameter before storing.
        self._set_datetime_parameter("valuationDate", value)

    @property
    def implied_volatility_bp(self):
        """
        User defined implied normal volatility, expressed in basis points.
        :return: float
        """
        return self._get_parameter("impliedVolatilityBp")

    @implied_volatility_bp.setter
    def implied_volatility_bp(self, value):
        self._set_parameter("impliedVolatilityBp", value)

    @property
    def implied_volatility_percent(self):
        """
        User defined implied lognormal volatility, expressed in percent.
        :return: float
        """
        return self._get_parameter("impliedVolatilityPercent")

    @implied_volatility_percent.setter
    def implied_volatility_percent(self, value):
        # (Dataset-extraction residue that was fused onto this line has been removed.)
        self._set_parameter("impliedVolatilityPercent", value)
from typing import Optional, Union, List, TYPE_CHECKING
from ._cap_floor_definition import CapFloorInstrumentDefinition
from ._cap_floor_pricing_parameters import PricingParameters
from ..._enums import (
AdjustInterestToPaymentDate,
IndexResetType,
Frequency,
DateRollingConvention,
DayCountBasis,
StubRule,
BuySell,
BusinessDayConvention,
PriceSide,
)
from ..._models import (
AmortizationItem,
BarrierDefinitionElement,
)
from .._base_definition import BaseDefinition
from ..._enums import InterestCalculationConvention
from ..._models import InputFlow
from ....._tools import validate_types, try_copy_to_list
if TYPE_CHECKING:
from ....._types import ExtendedParams, OptStrStrs, OptDateTime
class Definition(BaseDefinition):
    """
    API endpoint for Financial Contract analytics,
    that returns calculations relevant to each contract type.

    Parameters
    ----------
    instrument_tag : str, optional
        User defined string to identify the instrument. It can be used to link output
        results to the instrument definition. Only alphabetic, numeric and '- _.#=@'
        characters are supported. Optional.
    start_date : str or date or datetime or timedelta, optional
        The option start date
    end_date : str or date or datetime or timedelta, optional
        The maturity date of the CapFloor
    tenor : str, optional
        Tenor of the option
    notional_ccy : str, optional
        The ISO code of the notional currency. Mandatory if instrument code or
        instrument style has not been defined. In case an instrument code/style has been
        defined, value may comes from the reference data.
    notional_amount : float, optional
        The notional amount of the leg at the period start date. Optional. By default
        1,000,000 is used.
    index_name : str, optional
        The name of the floating rate index.
    index_tenor : str, optional
        The period code that represents the maturity of the floating rate index.
        Mandatory when the leg is float.
    interest_payment_frequency : Frequency or str, optional
        The frequency of the interest payments. Optional if an instrument code/style
        have been defined : in that case, value comes from reference data. Otherwise, it
        is mandatory.
    interest_calculation_method : DayCountBasis or str, optional
        The Day Count Basis method used to calculate the coupon interest payments.
        Mandatory.
    payment_business_day_convention : BusinessDayConvention or str, optional
        The method to adjust dates to a working day.
    payment_roll_convention : DateRollingConvention or str, optional
        The method to adjust payment dates when they fall at the end of the month (28th
        of February, 30th, 31st). Optional. By default 'SameDay' is used.
    index_reset_frequency : Frequency or str, optional
        The reset frequency in case the leg Type is Float. Optional. By default the
        IndexTenor is used.
    index_reset_type : IndexResetType or str, optional
        A flag that indicates if the floating rate index is reset before the coupon
        period starts or at the end of the coupon period. Optional. By
        default 'InAdvance' is used.
    index_fixing_lag : int, optional
        Defines the positive number of working days between the fixing date and the
        start of the coupon period ('InAdvance') or the end of the coupon period
        ('InArrears'). Optional. By default 0 is used.
    amortization_schedule : list of AmortizationItem, optional
        Definition of amortizations
    payment_business_days : str, optional
        A list of comma-separated calendar codes to adjust dates (e.g. 'EMU' or 'USA').
        Optional. By default, the calendar associated to NotionalCcy is used.
    adjust_interest_to_payment_date : AdjustInterestToPaymentDate or str, optional
        A flag that indicates if the coupon dates are adjusted to the payment dates.
        Optional. By default, 'false' is used.
    stub_rule : StubRule or str, optional
        The rule that defines whether coupon roll dates are aligned on the maturity or
        the issue date. Optional. By default, 'Maturity' is used.
    barrier_definition : BarrierDefinitionElement, optional
    buy_sell : BuySell, optional
        The side of the deal. Possible values:
        - Buy
        - Sell
    interest_calculation_convention : InterestCalculationConvention or str, optional
    payments : list of InputFlow, optional
    annualized_rebate : bool, optional
    cap_digital_payout_percent : float, optional
    cap_strike_percent : float, optional
        Cap leg strike expressed in %
    cms_template : str, optional
        A reference to a common swap contract that represents the underlying swap in
        case of a Constant Maturity Swap contract (CMS). Mandatory for CMS contract.
    floor_digital_payout_percent : float, optional
    floor_strike_percent : float, optional
        Floor leg strike expressed in %
    index_fixing_ric : str, optional
        The RIC that carries the fixing value. This value overrides the RIC associated
        by default with the IndexName and IndexTenor. Optional.
    is_backward_looking_index : bool, optional
    is_rfr : bool, optional
    is_term_rate : bool, optional
    fields: list of str, optional
        Contains the list of Analytics that the quantitative analytic service will
        compute.
    pricing_parameters : PricingParameters, optional
        The pricing parameters to apply to this instrument. Optional. If pricing
        parameters are not provided at this level parameters defined globally at the
        request level are used. If no pricing parameters are provided globally default
        values apply.
    extended_params : dict, optional
        If necessary other parameters
    index_price_side : PriceSide, optional
        The side that is selected for an index supporting Bid/Ask/Mid (which is the case of deposits).
    cap_strike_percent_schedule : dict, optional
        The schedule of the dates and cap strike rates. The rates are expressed in percentages.
        Either capStrikePercent or capStrikePercentSchedule must be provided.
        No default value applies.
    floor_strike_percent_schedule : dict, optional
        The schedule of the dates and floor strike rates. The rates are expressed in percentages.
        Either FloorStrikePercent or FloorStrikePercentSchedule must be provided.
        No default value applies.

    Methods
    -------
    get_data(session=session, on_response=on_response)
        Returns a response to the data platform
    get_stream(session=session)
        Get stream object of this definition

    Examples
    --------
    >>> import refinitiv.data.content.ipa.financial_contracts as rdf
    >>> definition = rdf.cap_floor.Definition(
    ...     instrument_tag="CapOnCms",
    ...     stub_rule=rdf.cap_floor.StubRule.MATURITY,
    ...     notional_ccy="USD",
    ...     start_date="2018-06-15",
    ...     end_date="2022-06-15",
    ...     notional_amount=1000000,
    ...     index_name="Composite",
    ...     index_tenor="5Y",
    ...     interest_calculation_method="Dcb_Actual_360",
    ...     interest_payment_frequency=rdf.cap_floor.Frequency.QUARTERLY,
    ...     buy_sell=rdf.cap_floor.BuySell.BUY,
    ...     cap_strike_percent=1,
    ...     pricing_parameters=rdf.cap_floor.PricingParameters(
    ...         skip_first_cap_floorlet=False, valuation_date="2020-02-07"
    ...     ),
    ...     fields=[
    ...         "InstrumentTag",
    ...         "InstrumentDescription",
    ...         "StartDate",
    ...         "EndDate",
    ...         "InterestPaymentFrequency",
    ...         "IndexRic",
    ...         "CapStrikePercent",
    ...         "FloorStrikePercent",
    ...         "NotionalCcy",
    ...         "NotionalAmount",
    ...         "PremiumBp",
    ...         "PremiumPercent",
    ...         "MarketValueInDealCcy",
    ...         "MarketValueInReportCcy",
    ...         "ErrorMessage",
    ...     ],
    ...)
    >>> response = definition.get_data()

    Using get_stream

    >>> response = definition.get_stream()
    """

    def __init__(
        self,
        instrument_tag: Optional[str] = None,
        start_date: "OptDateTime" = None,
        end_date: "OptDateTime" = None,
        tenor: Optional[str] = None,
        notional_ccy: Optional[str] = None,
        notional_amount: Optional[float] = None,
        index_name: Optional[str] = None,
        index_tenor: Optional[str] = None,
        # PEP 484: Optional is spelled out explicitly on every enum-or-str
        # parameter (implicit Optional via `Union[...] = None` is deprecated).
        interest_payment_frequency: Optional[Union[Frequency, str]] = None,
        interest_calculation_method: Optional[Union[DayCountBasis, str]] = None,
        payment_business_day_convention: Optional[Union[BusinessDayConvention, str]] = None,
        payment_roll_convention: Optional[Union[DateRollingConvention, str]] = None,
        index_reset_frequency: Optional[Union[Frequency, str]] = None,
        index_reset_type: Optional[Union[IndexResetType, str]] = None,
        index_fixing_lag: Optional[int] = None,
        amortization_schedule: Optional[List[AmortizationItem]] = None,
        payment_business_days: Optional[str] = None,
        adjust_interest_to_payment_date: Optional[Union[AdjustInterestToPaymentDate, str]] = None,
        stub_rule: Optional[Union[StubRule, str]] = None,
        barrier_definition: Optional[BarrierDefinitionElement] = None,
        buy_sell: Optional[Union[BuySell, str]] = None,
        interest_calculation_convention: Optional[Union[InterestCalculationConvention, str]] = None,
        payments: Optional[List[InputFlow]] = None,
        annualized_rebate: Optional[bool] = None,
        cap_digital_payout_percent: Optional[float] = None,
        cap_strike_percent: Optional[float] = None,
        cms_template: Optional[str] = None,
        floor_digital_payout_percent: Optional[float] = None,
        floor_strike_percent: Optional[float] = None,
        index_fixing_ric: Optional[str] = None,
        is_backward_looking_index: Optional[bool] = None,
        is_rfr: Optional[bool] = None,
        is_term_rate: Optional[bool] = None,
        fields: "OptStrStrs" = None,
        pricing_parameters: Optional[PricingParameters] = None,
        extended_params: "ExtendedParams" = None,
        index_price_side: Optional[PriceSide] = None,
        cap_strike_percent_schedule: Optional[dict] = None,
        floor_strike_percent_schedule: Optional[dict] = None,
    ):
        # Fail fast on a non-int fixing lag before building the request payload.
        validate_types(index_fixing_lag, [int, type(None)], "index_fixing_lag")
        # Defensive copies so a caller's list is never mutated by the request builder.
        amortization_schedule = try_copy_to_list(amortization_schedule)
        payments = try_copy_to_list(payments)
        fields = try_copy_to_list(fields)

        definition = CapFloorInstrumentDefinition(
            instrument_tag=instrument_tag,
            start_date=start_date,
            end_date=end_date,
            tenor=tenor,
            notional_ccy=notional_ccy,
            notional_amount=notional_amount,
            index_name=index_name,
            index_tenor=index_tenor,
            interest_payment_frequency=interest_payment_frequency,
            interest_calculation_method=interest_calculation_method,
            payment_business_day_convention=payment_business_day_convention,
            payment_roll_convention=payment_roll_convention,
            index_reset_frequency=index_reset_frequency,
            index_reset_type=index_reset_type,
            index_fixing_lag=index_fixing_lag,
            amortization_schedule=amortization_schedule,
            payment_business_days=payment_business_days,
            adjust_interest_to_payment_date=adjust_interest_to_payment_date,
            stub_rule=stub_rule,
            barrier_definition=barrier_definition,
            buy_sell=buy_sell,
            interest_calculation_convention=interest_calculation_convention,
            payments=payments,
            annualized_rebate=annualized_rebate,
            cap_digital_payout_percent=cap_digital_payout_percent,
            cap_strike_percent=cap_strike_percent,
            cms_template=cms_template,
            floor_digital_payout_percent=floor_digital_payout_percent,
            floor_strike_percent=floor_strike_percent,
            index_fixing_ric=index_fixing_ric,
            is_backward_looking_index=is_backward_looking_index,
            is_rfr=is_rfr,
            is_term_rate=is_term_rate,
            index_price_side=index_price_side,
            cap_strike_percent_schedule=cap_strike_percent_schedule,
            floor_strike_percent_schedule=floor_strike_percent_schedule,
        )
        # (Dataset-extraction residue that was fused onto the final line has been removed.)
        super().__init__(
            definition=definition,
            fields=fields,
            pricing_parameters=pricing_parameters,
            extended_params=extended_params,
        )
from typing import Optional, Union
from ....._types import OptDateTime
from ..._enums import PriceSide, FxSwapCalculationMethod, ImpliedDepositDateConvention
from ..._object_definition import ObjectDefinition
class PricingParameters(ObjectDefinition):
    """
    Pricing parameters for the FX Cross financial-contract analytics endpoint.

    Parameters
    ----------
    fx_swap_calculation_method : FxSwapCalculationMethod or str, optional
        The method used to calculate an outright price or deposit rates. Optional.
        Defaults to 'fxswap'.
    implied_deposit_date_convention : ImpliedDepositDateConvention or str, optional
        An indicator for the "depositccy1impliedfromfxswap",
        "depositccy2impliedfromfxswap" calculation methods, used to check whether dts or
        dtm period is selected for the implied deposits calculation. By default, for the
        implied deposits calculation we use the dts(day to spot) period, while for the
        deposits the dtm(day to money market) period is used.
    price_side : PriceSide or str, optional
        The quoted price side of the instrument. Optional. Defaults to 'mid'.
    adjust_all_deposit_points_to_cross_calendars : bool, optional
        An indicator if depositccy1marketdata and depositccy2marketdata are adjusted to
        the cross calendar dates. The possible values are:
        - true: the market data is adjusted according to the cross calendar dates,
        - false: unmodified market data is returned, cross calendar is ignored.
        Optional. Defaults to 'false'.
    adjust_all_swap_points_to_cross_calendars : bool, optional
        An indicator if fxswapsccy1 and fxswapsccy2 are adjusted to the cross calendar
        dates. The possible values are:
        - true: the market data is adjusted according to the cross calendar dates,
        - false: unmodified market data is returned, cross calendar is ignored.
        Optional. Defaults to 'false'.
    ignore_ref_ccy_holidays : bool, optional
        An indicator if holidays of the reference currency are included or not in the
        pricing when dates are computed. By default, the reference currency is usd. The
        possible values are:
        - true: holidays are ignored,
        - false: holidays are included in pricing. Optional. Defaults to 'false'.
    market_data_date : str or date or datetime or timedelta, optional
        The date at which the market data is retrieved. The value is expressed in iso
        8601 format: yyyy-mm-ddt[hh]:[mm]:[ss]z (e.g., '2021-01-01t00:00:00z'). It
        should be less or equal to valuationdate. Optional. By default,
        marketdatadate is valuationdate or today.
    report_ccy : str, optional
        The reporting currency code, expressed in iso 4217 alphabetical format (e.g.,
        'usd'). It is set for the fields ending with 'xxxinreportccy'. Optional. The
        default value is the notional currency.
    use_direct_quote : bool, optional
        An indicator if the spot and the swap points should be retrieved without a pivot
        currency
        - true: the market data is retrieved directly
        - false: unmodified market data is returned. Optional. Defaults to 'false'.
    valuation_date : str or date or datetime or timedelta, optional
        The valuation date for pricing. Optional. If not set the valuation date is
        equal to market_data_date or Today. For assets that contain a
        settlementConvention, the default valuation date is equal to the settlement
        date of the asset that is usually the TradeDate+SettlementConvention.
    """

    def __init__(
        self,
        # PEP 484: Optional is spelled out explicitly (implicit Optional via
        # `Union[...] = None` is deprecated).
        fx_swap_calculation_method: Optional[Union[FxSwapCalculationMethod, str]] = None,
        implied_deposit_date_convention: Optional[Union[ImpliedDepositDateConvention, str]] = None,
        price_side: Optional[Union[PriceSide, str]] = None,
        adjust_all_deposit_points_to_cross_calendars: Optional[bool] = None,
        adjust_all_swap_points_to_cross_calendars: Optional[bool] = None,
        ignore_ref_ccy_holidays: Optional[bool] = None,
        market_data_date: "OptDateTime" = None,
        report_ccy: Optional[str] = None,
        use_direct_quote: Optional[bool] = None,
        valuation_date: "OptDateTime" = None,
    ) -> None:
        super().__init__()
        # Each assignment routes through the matching property setter below, which
        # serializes the value into the request-parameter dictionary.
        self.fx_swap_calculation_method = fx_swap_calculation_method
        self.implied_deposit_date_convention = implied_deposit_date_convention
        self.price_side = price_side
        self.adjust_all_deposit_points_to_cross_calendars = adjust_all_deposit_points_to_cross_calendars
        self.adjust_all_swap_points_to_cross_calendars = adjust_all_swap_points_to_cross_calendars
        self.ignore_ref_ccy_holidays = ignore_ref_ccy_holidays
        self.market_data_date = market_data_date
        self.report_ccy = report_ccy
        self.use_direct_quote = use_direct_quote
        self.valuation_date = valuation_date

    @property
    def fx_swap_calculation_method(self):
        """
        The method we chose to price outright using or not implied deposits. Possible
        values are: FxSwap (compute outright using swap points),
        DepositCcy1ImpliedFromFxSwap (compute currency1 deposits using swap points),
        DepositCcy2ImpliedFromFxSwap (compute currency2 deposits using swap points).
        Optional. Defaults to 'FxSwap'.
        :return: enum FxSwapCalculationMethod
        """
        return self._get_enum_parameter(FxSwapCalculationMethod, "fxSwapCalculationMethod")

    @fx_swap_calculation_method.setter
    def fx_swap_calculation_method(self, value):
        self._set_enum_parameter(FxSwapCalculationMethod, "fxSwapCalculationMethod", value)

    @property
    def implied_deposit_date_convention(self):
        """
        The date convention used for the implied deposits calculation.
        :return: enum ImpliedDepositDateConvention
        """
        # Fixed upstream docstring that wrongly claimed ":return: object FxPoint".
        return self._get_enum_parameter(ImpliedDepositDateConvention, "impliedDepositDateConvention")

    @implied_deposit_date_convention.setter
    def implied_deposit_date_convention(self, value):
        self._set_enum_parameter(ImpliedDepositDateConvention, "impliedDepositDateConvention", value)

    @property
    def price_side(self):
        """
        The type of price returned for pricing Analysis:
        Bid(Bid value),
        Ask(Ask value),
        Mid(Mid value)
        Optional. Defaults to 'Mid'.
        :return: enum PriceSide
        """
        return self._get_enum_parameter(PriceSide, "priceSide")

    @price_side.setter
    def price_side(self, value):
        self._set_enum_parameter(PriceSide, "priceSide", value)

    @property
    def adjust_all_deposit_points_to_cross_calendars(self):
        """
        An indicator if depositccy1marketdata and depositccy2marketdata are adjusted to
        the cross calendar dates. the possible values are:
        - true: the market data is adjusted according to the cross calendar dates,
        - false: unmodified market data is returned, cross calendar is ignored.
        optional. defaults to 'false'.
        :return: bool
        """
        return self._get_parameter("adjustAllDepositPointsToCrossCalendars")

    @adjust_all_deposit_points_to_cross_calendars.setter
    def adjust_all_deposit_points_to_cross_calendars(self, value):
        self._set_parameter("adjustAllDepositPointsToCrossCalendars", value)

    @property
    def adjust_all_swap_points_to_cross_calendars(self):
        """
        This flag define if cross-calendar holidays should be taken into account for
        fx_swaps_ccy1 and fx_swaps_ccy2. it adjusts swap points according to cross
        holidays if it's set to true, by default set to false.
        :return: bool
        """
        return self._get_parameter("adjustAllSwapPointsToCrossCalendars")

    @adjust_all_swap_points_to_cross_calendars.setter
    def adjust_all_swap_points_to_cross_calendars(self, value):
        self._set_parameter("adjustAllSwapPointsToCrossCalendars", value)

    @property
    def ignore_ref_ccy_holidays(self):
        """
        The reference currency holidays flag : When dates are computed, its possible to
        choose if holidays of the reference currency are included or not in the pricing
        Optional. Defaults to 'false'.
        :return: bool
        """
        return self._get_parameter("ignoreRefCcyHolidays")

    @ignore_ref_ccy_holidays.setter
    def ignore_ref_ccy_holidays(self, value):
        self._set_parameter("ignoreRefCcyHolidays", value)

    @property
    def market_data_date(self):
        """
        The market data date for pricing. Optional. By default, the market_data_date
        date is the valuation_date or Today.
        :return: str
        """
        return self._get_parameter("marketDataDate")

    @market_data_date.setter
    def market_data_date(self, value):
        # Date-like input is normalized by _set_datetime_parameter before storing.
        self._set_datetime_parameter("marketDataDate", value)

    @property
    def report_ccy(self):
        """
        The reporting currency code (ISO 4217, e.g. 'usd').
        :return: str
        """
        return self._get_parameter("reportCcy")

    @report_ccy.setter
    def report_ccy(self, value):
        self._set_parameter("reportCcy", value)

    @property
    def use_direct_quote(self):
        """
        An indicator if the spot and the swap points should be retrieved without a pivot
        currency
        - true: the market data is retrieved directly
        - false: unmodified market data is returned. optional. defaults to 'false'.
        :return: bool
        """
        return self._get_parameter("useDirectQuote")

    @use_direct_quote.setter
    def use_direct_quote(self, value):
        self._set_parameter("useDirectQuote", value)

    @property
    def valuation_date(self):
        """
        The valuation date for pricing. Optional. If not set the valuation date is
        equal to market_data_date or Today. For assets that contain a
        settlementConvention, the default valuation date is equal to the settlement
        date of the asset that is usually the TradeDate+SettlementConvention.
        :return: str
        """
        return self._get_parameter("valuationDate")

    @valuation_date.setter
    def valuation_date(self, value):
        # Date-like input is normalized by _set_datetime_parameter before storing.
        # (Dataset-extraction residue that was fused onto this line has been removed.)
        self._set_datetime_parameter("valuationDate", value)
from typing import List, Optional, Union
from ..._enums import FxCrossType
from ._fx_cross_leg_definition import LegDefinition
from .._instrument_definition import InstrumentDefinition
class FxCrossInstrumentDefinition(InstrumentDefinition):
    """
    API endpoint for Financial Contract analytics,
    that returns calculations relevant to each contract type.
    Parameters
    ----------
    instrument_tag : str, optional
        User defined string to identify the instrument. It can be used to link output
        results to the instrument definition. Only alphabetic, numeric and '- _.#=@'
        characters are supported. Optional.
    legs : list of LegDefinition, optional
        Extra parameters to describe further the contract. 1 leg is mandatory for
        Forwards and NDFs contracts. 2 legs are required for Swaps, and FwdFwdSwaps
        contracts. Optional for Spot contracts.
    fx_cross_type : FxCrossType or str, optional
        The type of the Fx Cross instrument. Mandatory.
    fx_cross_code : str, optional
        The ISO code of the cross currency (e.g. 'EURCHF'). Mandatory.
    ndf_fixing_settlement_ccy : str, optional
        In case of a NDF contract, the ISO code of the settlement currency (e.g. 'EUR'
        ). Optional.
    reference_spot_rate : float, optional
        Contractual Spot Rate the counterparties agreed. It is used to compute the
        traded_cross_rate as 'reference_spot_rate + traded_swap_points /
        FxSwapPointScalingFactor'. In the case of a "FxSwap" contract, it is also used
        to compute nearLeg.ContraAmount from nearLeg.DealAmount as
        'nearLeg.ContraAmount = nearLeg.DealAmount * (reference_spot_rate /
        FxCrossScalingFactor)'. Optional. Default value is null. In that case
        traded_cross_rate and Leg ContraAmount may not be computed.
    traded_cross_rate : float, optional
        The contractual exchange rate agreed by the two counterparties. It is used to
        compute the ContraAmount if the amount is not filled. In the case of a
        'FxForward' and 'FxNonDeliverableForward' contract : ContraAmount is computed as
        'DealAmount x traded_cross_rate / FxCrossScalingFactor'. In the case of a
        'FxSwap' contract : farLeg.ContraAmount is computed as 'nearLeg.DealAmount x
        traded_cross_rate / FxCrossScalingFactor'. Optional. Default value is null. It
        means that if both ContraAmount and traded_cross_rate are not set, market value
        cannot be computed.
    traded_swap_points : float, optional
        Contractual forward points agreed by the two counterparties. It is used to
        compute the traded_cross_rate as 'reference_spot_rate + traded_swap_points /
        FxSwapPointScalingFactor'. Optional. Default value is null. In that case
        traded_cross_rate and Leg ContraAmount may not be computed.
    settlement_ccy : str, optional
        The settlement currency code of the contract, expressed in ISO 4217
        alphabetical format (e.g. 'USD'). Optional.
    """
    def __init__(
        self,
        *,
        instrument_tag: Optional[str] = None,
        legs: Optional[List[LegDefinition]] = None,
        fx_cross_type: Optional[Union[FxCrossType, str]] = None,
        fx_cross_code: Optional[str] = None,
        ndf_fixing_settlement_ccy: Optional[str] = None,
        reference_spot_rate: Optional[float] = None,
        traded_cross_rate: Optional[float] = None,
        traded_swap_points: Optional[float] = None,
        settlement_ccy: Optional[str] = None,
    ) -> None:
        super().__init__()
        self.instrument_tag = instrument_tag
        self.legs = legs
        self.fx_cross_type = fx_cross_type
        self.fx_cross_code = fx_cross_code
        self.ndf_fixing_settlement_ccy = ndf_fixing_settlement_ccy
        self.reference_spot_rate = reference_spot_rate
        self.traded_cross_rate = traded_cross_rate
        self.traded_swap_points = traded_swap_points
        self.settlement_ccy = settlement_ccy
    def get_instrument_type(self):
        """Return the instrument-type tag used by the analytics backend."""
        return "FxCross"
    @property
    def fx_cross_type(self):
        """
        The type of the Fx Cross instrument : 'FxSpot', 'FxForward',
        'FxNonDeliverableForward', 'FxSwap', 'MultiLeg' or 'FxForwardForward'.
        Mandatory.
        :return: enum FxCrossType
        """
        return self._get_enum_parameter(FxCrossType, "fxCrossType")
    @fx_cross_type.setter
    def fx_cross_type(self, value):
        self._set_enum_parameter(FxCrossType, "fxCrossType", value)
    @property
    def legs(self):
        """
        Extra parameters to describe further the contract. 1 leg is mandatory for
        Forwards and NDFs contracts. 2 legs are required for Swaps, and FwdFwdSwaps
        contracts. Optional for Spot contracts.
        :return: list LegDefinition
        """
        return self._get_list_parameter(LegDefinition, "legs")
    @legs.setter
    def legs(self, value):
        self._set_list_parameter(LegDefinition, "legs", value)
    @property
    def fx_cross_code(self):
        """
        The ISO code of the cross currency (e.g. 'EURCHF'). Mandatory.
        :return: str
        """
        return self._get_parameter("fxCrossCode")
    @fx_cross_code.setter
    def fx_cross_code(self, value):
        self._set_parameter("fxCrossCode", value)
    @property
    def instrument_tag(self):
        """
        User defined string to identify the instrument. It can be used to link output
        results to the instrument definition. Only alphabetic, numeric and '- _.#=@'
        characters are supported. Optional.
        :return: str
        """
        return self._get_parameter("instrumentTag")
    @instrument_tag.setter
    def instrument_tag(self, value):
        self._set_parameter("instrumentTag", value)
    @property
    def ndf_fixing_settlement_ccy(self):
        """
        In case of a NDF contract, the ISO code of the settlement currency (e.g. 'EUR'
        ). Optional.
        :return: str
        """
        return self._get_parameter("ndfFixingSettlementCcy")
    @ndf_fixing_settlement_ccy.setter
    def ndf_fixing_settlement_ccy(self, value):
        self._set_parameter("ndfFixingSettlementCcy", value)
    @property
    def reference_spot_rate(self):
        """
        Contractual Spot Rate the counterparties agreed. It is used to compute the
        traded_cross_rate as 'reference_spot_rate + traded_swap_points /
        FxSwapPointScalingFactor'. In the case of a "FxSwap" contract, it is also used
        to compute nearLeg.ContraAmount from nearLeg.DealAmount as
        'nearLeg.ContraAmount = nearLeg.DealAmount * (reference_spot_rate /
        FxCrossScalingFactor)'. Optional. Default value is null. In that case
        traded_cross_rate and Leg ContraAmount may not be computed.
        :return: float
        """
        return self._get_parameter("referenceSpotRate")
    @reference_spot_rate.setter
    def reference_spot_rate(self, value):
        self._set_parameter("referenceSpotRate", value)
    @property
    def traded_cross_rate(self):
        """
        The contractual exchange rate agreed by the two counterparties. It is used to
        compute the ContraAmount if the amount is not filled. In the case of a
        'FxForward' and 'FxNonDeliverableForward' contract : ContraAmount is computed as
        'DealAmount x traded_cross_rate / FxCrossScalingFactor'. In the case of a
        'FxSwap' contract : farLeg.ContraAmount is computed as 'nearLeg.DealAmount x
        traded_cross_rate / FxCrossScalingFactor'. Optional. Default value is null. It
        means that if both ContraAmount and traded_cross_rate are not set, market value
        cannot be computed.
        :return: float
        """
        return self._get_parameter("tradedCrossRate")
    @traded_cross_rate.setter
    def traded_cross_rate(self, value):
        self._set_parameter("tradedCrossRate", value)
    @property
    def traded_swap_points(self):
        """
        Contractual forward points agreed by the two counterparties. It is used to
        compute the traded_cross_rate as 'reference_spot_rate + traded_swap_points /
        FxSwapPointScalingFactor'. Optional. Default value is null. In that case
        traded_cross_rate and Leg ContraAmount may not be computed.
        :return: float
        """
        return self._get_parameter("tradedSwapPoints")
    @traded_swap_points.setter
    def traded_swap_points(self, value):
        self._set_parameter("tradedSwapPoints", value)
    @property
    def settlement_ccy(self):
        """
        The settlement currency code of the contract, expressed in ISO 4217
        alphabetical format (e.g. 'USD'). Optional.
        :return: str
        """
        return self._get_parameter("settlementCcy")
@settlement_ccy.setter
def settlement_ccy(self, value):
self._set_parameter("settlementCcy", value) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ipa/financial_contracts/cross/_fx_cross_definition.py | 0.954848 | 0.611904 | _fx_cross_definition.py | pypi |
from typing import Optional, Union
from ....._types import OptDateTime
from .._instrument_definition import ObjectDefinition
from ..._enums import (
FxLegType,
BuySell,
)
class LegDefinition(ObjectDefinition):
    """
    API endpoint for Financial Contract analytics,
    that returns calculations relevant to each contract type.
    Parameters
    ----------
    start_date : str or date or datetime or timedelta, optional
        The effective date of the leg.
    end_date : str or date or datetime or timedelta, optional
        The maturity date of the contract that is the date the amounts are exchanged.
        Either the end_date or the tenor must be provided.
    tenor : str, optional
        The tenor representing the maturity date of the contract (e.g. '1Y' or '6M' ).
        Either the end_date or the tenor must be provided.
    leg_tag : str, optional
        A user defined string to identify the leg. Optional.
    deal_ccy_buy_sell : BuySell or str, optional
        The direction of the trade in terms of the deal currency.
        Optional. Defaults to 'Buy'
    fx_leg_type : FxLegType or str, optional
        The enumeration that specifies the type of the leg. Mandatory for MultiLeg,
        FwdFwdSwap, or Swap contracts. Optional for Spot and Forwards contracts.
    contra_amount : float, optional
        The unsigned amount exchanged to buy or sell the traded amount. Optional. By
        default, it is calculated from the traded rate and the deal_amount. If no traded
        rate is provided the market rate will be used.
    contra_ccy : str, optional
        The currency that is exchanged. Optional. By default, the second currency in the
        FxCrossCode.
    deal_amount : float, optional
        The unsigned amount of traded currency actually bought or sold. Optional.
        Defaults to 1,000,000.
    deal_ccy : str, optional
        The ISO code of the traded currency (e.g. 'EUR' ). Optional. Defaults to the
        first currency of the FxCrossCode.
    start_tenor : str, optional
        The tenor representing the starting of maturity period of the contract (e.g.
        '1Y' or '6M' ). Either the start_date or the start_tenor must be provided for
        TimeOptionForward.
    """
    def __init__(
        self,
        start_date: "OptDateTime" = None,
        end_date: "OptDateTime" = None,
        tenor: Optional[str] = None,
        leg_tag: Optional[str] = None,
        deal_ccy_buy_sell: Optional[Union[BuySell, str]] = None,
        fx_leg_type: Optional[Union[FxLegType, str]] = None,
        contra_amount: Optional[float] = None,
        contra_ccy: Optional[str] = None,
        deal_amount: Optional[float] = None,
        deal_ccy: Optional[str] = None,
        start_tenor: Optional[str] = None,
    ) -> None:
        super().__init__()
        self.start_date = start_date
        self.end_date = end_date
        self.tenor = tenor
        self.leg_tag = leg_tag
        self.deal_ccy_buy_sell = deal_ccy_buy_sell
        self.fx_leg_type = fx_leg_type
        self.contra_amount = contra_amount
        self.contra_ccy = contra_ccy
        self.deal_amount = deal_amount
        self.deal_ccy = deal_ccy
        self.start_tenor = start_tenor
    @property
    def deal_ccy_buy_sell(self):
        """
        The direction of the trade in terms of the deal currency : 'Buy' or 'Sell'.
        Optional. Defaults to 'Buy'
        :return: enum BuySell
        """
        return self._get_enum_parameter(BuySell, "dealCcyBuySell")
    @deal_ccy_buy_sell.setter
    def deal_ccy_buy_sell(self, value):
        self._set_enum_parameter(BuySell, "dealCcyBuySell", value)
    @property
    def fx_leg_type(self):
        """
        The enumeration that specifies the type of the leg : 'Spot', 'FxForward',
        'FxNonDeliverableForward', 'SwapNear' or 'SwapFar'. Mandatory for MultiLeg,
        FwdFwdSwap, or Swap contracts. Optional for Spot and Forwards contracts.
        :return: enum FxLegType
        """
        return self._get_enum_parameter(FxLegType, "fxLegType")
    @fx_leg_type.setter
    def fx_leg_type(self, value):
        self._set_enum_parameter(FxLegType, "fxLegType", value)
    @property
    def contra_amount(self):
        """
        The unsigned amount exchanged to buy or sell the traded amount. Optional. By
        default, it is calculated from the traded rate and the deal_amount. If no traded
        rate is provided the market rate will be used.
        :return: float
        """
        return self._get_parameter("contraAmount")
    @contra_amount.setter
    def contra_amount(self, value):
        self._set_parameter("contraAmount", value)
    @property
    def contra_ccy(self):
        """
        The currency that is exchanged. Optional. By default, the second currency in the
        FxCrossCode.
        :return: str
        """
        return self._get_parameter("contraCcy")
    @contra_ccy.setter
    def contra_ccy(self, value):
        self._set_parameter("contraCcy", value)
    @property
    def deal_amount(self):
        """
        The unsigned amount of traded currency actually bought or sold. Optional.
        Defaults to 1,000,000.
        :return: float
        """
        return self._get_parameter("dealAmount")
    @deal_amount.setter
    def deal_amount(self, value):
        self._set_parameter("dealAmount", value)
    @property
    def deal_ccy(self):
        """
        The ISO code of the traded currency (e.g. 'EUR' ). Optional. Defaults to the
        first currency of the FxCrossCode.
        :return: str
        """
        return self._get_parameter("dealCcy")
    @deal_ccy.setter
    def deal_ccy(self, value):
        self._set_parameter("dealCcy", value)
    @property
    def end_date(self):
        """
        The maturity date of the contract that is the date the amounts are exchanged.
        Either the end_date or the tenor must be provided.
        :return: str
        """
        return self._get_parameter("endDate")
    @end_date.setter
    def end_date(self, value):
        self._set_datetime_parameter("endDate", value)
    @property
    def leg_tag(self):
        """
        A user defined string to identify the leg. Optional.
        :return: str
        """
        return self._get_parameter("legTag")
    @leg_tag.setter
    def leg_tag(self, value):
        self._set_parameter("legTag", value)
    @property
    def start_date(self):
        """
        The effective date of the leg. Optional.
        :return: str
        """
        return self._get_parameter("startDate")
    @start_date.setter
    def start_date(self, value):
        self._set_datetime_parameter("startDate", value)
    @property
    def start_tenor(self):
        """
        The tenor representing the starting of maturity period of the contract (e.g.
        '1Y' or '6M' ). Either the start_date or the start_tenor must be provided for
        TimeOptionForward.
        :return: str
        """
        return self._get_parameter("startTenor")
    @start_tenor.setter
    def start_tenor(self, value):
        self._set_parameter("startTenor", value)
    @property
    def tenor(self):
        """
        The tenor representing the maturity date of the contract (e.g. '1Y' or '6M' ).
        Either the end_date or the tenor must be provided.
        :return: str
        """
        return self._get_parameter("tenor")
@tenor.setter
def tenor(self, value):
self._set_parameter("tenor", value) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ipa/financial_contracts/cross/_fx_cross_leg_definition.py | 0.948644 | 0.551453 | _fx_cross_leg_definition.py | pypi |
from typing import Optional, List, TYPE_CHECKING
from ..._enums import FxCrossType
from ._fx_cross_definition import FxCrossInstrumentDefinition
from ._fx_cross_leg_definition import LegDefinition
from ._fx_cross_pricing_parameters import PricingParameters
from .._base_definition import BaseDefinition
from ....._tools import try_copy_to_list
if TYPE_CHECKING:
from ....._types import ExtendedParams, OptStrStrs
class Definition(BaseDefinition):
    """
    API endpoint for Financial Contract analytics,
    that returns calculations relevant to each contract type.
    Parameters
    ----------
    instrument_tag : str, optional
        User defined string to identify the instrument. It can be used to link output
        results to the instrument definition. Only alphabetic, numeric and '- _.#=@'
        characters are supported. Optional.
    legs : list of LegDefinition, optional
        Extra parameters to describe further the contract. 1 leg is mandatory for
        Forwards and NDFs contracts. 2 legs are required for Swaps, and FwdFwdSwaps
        contracts. Optional for Spot contracts.
    fx_cross_type : FxCrossType or str, optional
        The type of the Fx Cross instrument. Mandatory.
    fx_cross_code : str, optional
        The ISO code of the cross currency (e.g. 'EURCHF'). Mandatory.
    ndf_fixing_settlement_ccy : str, optional
        In case of a NDF contract, the ISO code of the settlement currency (e.g. 'EUR'
        ). Optional.
    reference_spot_rate : float, optional
        Contractual Spot Rate the counterparties agreed. It is used to compute the
        traded_cross_rate as 'reference_spot_rate + traded_swap_points /
        FxSwapPointScalingFactor'. In the case of a "FxSwap" contract, it is also used
        to compute nearLeg.ContraAmount from nearLeg.DealAmount as
        'nearLeg.ContraAmount = nearLeg.DealAmount * (reference_spot_rate /
        FxCrossScalingFactor)'. Optional. Default value is null. In that case
        traded_cross_rate and Leg ContraAmount may not be computed.
    traded_cross_rate : float, optional
        The contractual exchange rate agreed by the two counterparties. It is used to
        compute the ContraAmount if the amount is not filled. In the case of a
        'FxForward' and 'FxNonDeliverableForward' contract : ContraAmount is computed as
        'DealAmount x traded_cross_rate / FxCrossScalingFactor'. In the case of a
        'FxSwap' contract : farLeg.ContraAmount is computed as 'nearLeg.DealAmount x
        traded_cross_rate / FxCrossScalingFactor'. Optional. Default value is null. It
        means that if both ContraAmount and traded_cross_rate are not set, market value
        cannot be computed.
    traded_swap_points : float, optional
        Contractual forward points agreed by the two counterparties. It is used to
        compute the traded_cross_rate as 'reference_spot_rate + traded_swap_points /
        FxSwapPointScalingFactor'. Optional. Default value is null. In that case
        traded_cross_rate and Leg ContraAmount may not be computed.
    fields: list of str, optional
        Contains the list of Analytics that the quantitative analytic service will
        compute.
    pricing_parameters : PricingParameters, optional
        The pricing parameters to apply to this instrument. Optional. If pricing
        parameters are not provided at this level parameters defined globally at the
        request level are used. If no pricing parameters are provided globally default
        values apply.
    extended_params : dict, optional
        If necessary other parameters.
    settlement_ccy : str, optional
        This settlement currency code in case of an FxNonDeliverableForward (NDF) contract.
        The value is expressed in ISO 4217 alphabetical format (e.g., 'USD').
        Default value is 'USD'.
    Methods
    -------
    get_data(session=session, on_response=on_response)
        Returns a response to the data platform
    get_stream(session=session)
        Get stream quantitative analytic service subscription
    Examples
    --------
    >>> import refinitiv.data.content.ipa.financial_contracts as rdf
    >>> definition = rdf.cross.Definition(
    ...     instrument_tag="00102700008910C",
    ...     fx_cross_type=rdf.cross.FxCrossType.FX_FORWARD,
    ...     fx_cross_code="USDEUR",
    ...     legs=[rdf.cross.LegDefinition(end_date="2015-04-09T00:00:00Z")],
    ...     pricing_parameters=rdf.cross.PricingParameters(
    ...         valuation_date="2015-02-02T00:00:00Z",
    ...         price_side=rdf.cross.PriceSide.MID,
    ...     ),
    ...     fields=[
    ...         "InstrumentTag",
    ...         "ValuationDate",
    ...         "InstrumentDescription",
    ...         "FxOutrightCcy1Ccy2",
    ...     ],
    ... )
    >>> response = definition.get_data()
    >>> response.data.df
    Using get_stream
    >>> response = definition.get_stream()
    """
    def __init__(
        self,
        instrument_tag: Optional[str] = None,
        legs: Optional[List[LegDefinition]] = None,
        fx_cross_type: Optional[FxCrossType] = None,
        fx_cross_code: Optional[str] = None,
        ndf_fixing_settlement_ccy: Optional[str] = None,
        reference_spot_rate: Optional[float] = None,
        traded_cross_rate: Optional[float] = None,
        traded_swap_points: Optional[float] = None,
        fields: "OptStrStrs" = None,
        pricing_parameters: Optional[PricingParameters] = None,
        extended_params: "ExtendedParams" = None,
        settlement_ccy: Optional[str] = None,
    ):
        # Defensive copies so caller-owned lists are never mutated downstream.
        legs = try_copy_to_list(legs)
        fields = try_copy_to_list(fields)
        definition = FxCrossInstrumentDefinition(
            fx_cross_type=fx_cross_type,
            legs=legs,
            fx_cross_code=fx_cross_code,
            instrument_tag=instrument_tag,
            ndf_fixing_settlement_ccy=ndf_fixing_settlement_ccy,
            reference_spot_rate=reference_spot_rate,
            traded_cross_rate=traded_cross_rate,
            traded_swap_points=traded_swap_points,
            settlement_ccy=settlement_ccy,
        )
        super().__init__(
            definition=definition,
            fields=fields,
            pricing_parameters=pricing_parameters,
            extended_params=extended_params,
        ) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ipa/financial_contracts/cross/_definition.py | 0.947563 | 0.510985 | _definition.py | pypi |
from typing import Optional, Union
from ....._types import OptDateTime
from ..._enums import (
BusinessDayConvention,
DateRollingConvention,
DayCountBasis,
Frequency,
)
from .._instrument_definition import InstrumentDefinition
class TermDepositInstrumentDefinition(InstrumentDefinition):
    """
    API endpoint for Financial Contract analytics,
    that returns calculations relevant to each contract type.
    Parameters
    ----------
    instrument_tag : str, optional
        User defined string to identify the instrument. It can be used to link output
        results to the instrument definition. Only alphabetic, numeric and '- _.#=@'
        characters are supported.
    instrument_code : str, optional
        Code to define the term deposit instrument. For the moment, only RICs for CDs
        and Wholesales deposits are supported, with deposit code (ex:"EUR1MD=").
    start_date : str or date or datetime or timedelta, optional
        The date the term deposit starts accruing interest. Its effective date.
        By default it is derived from the ValuationDate and the day to spot convention
        of the contract currency.
    end_date : str or date or datetime or timedelta, optional
        The maturity date of the term deposit contract. Either the endDate or the tenor
        must be provided.
    tenor : str, optional
        The period code that represents the time between the start date and end date of
        the contract.
        Mandatory if instrumentCode is null. Either the endDate or the tenor must be
        provided.
    notional_ccy : str, optional
        The ISO code of the notional currency.
        Should be explicitly specified if InstrumentCode hasn't been specified.
        May be retrieved from reference data.
    notional_amount : float, optional
        The notional amount of the term deposit at the start date.
        By default 1,000,000 is used.
    fixed_rate_percent : float, optional
        Fixed interest rate percent to be applied for notional by deal terms.
        Mandatory if instrument_code is None.
    payment_business_day_convention : BusinessDayConvention or str, optional
        The method to adjust dates to a working day.
        By default 'ModifiedFollowing'.
    payment_roll_convention : DateRollingConvention or str, optional
        Method to adjust payment dates when they fall at the end of the month.
        By default 'Last'.
    year_basis : DayCountBasis or str, optional
        The Day Count Basis method used to calculate the interest payments.
        By default 'Dcb_Actual_365'.
    calendar : str, optional
        Calendar used to adjust deposit duration calculation.
        By default the calendar corresponding to notional currency is used.
    interest_payment_frequency : Frequency or str, optional
        The interest payment frequency.
    interest_calculation_method : DayCountBasis or str, optional
        The day count basis method used to calculate the interest payments.
    """
    def __init__(
        self,
        *,
        instrument_tag: Optional[str] = None,
        instrument_code: Optional[str] = None,
        start_date: "OptDateTime" = None,
        end_date: "OptDateTime" = None,
        tenor: Optional[str] = None,
        notional_ccy: Optional[str] = None,
        notional_amount: Optional[float] = None,
        fixed_rate_percent: Optional[float] = None,
        payment_business_day_convention: Optional[Union[BusinessDayConvention, str]] = None,
        payment_roll_convention: Optional[Union[DateRollingConvention, str]] = None,
        year_basis: Optional[Union[DayCountBasis, str]] = None,
        calendar: Optional[str] = None,
        interest_payment_frequency: Optional[Union[Frequency, str]] = None,
        interest_calculation_method: Optional[Union[DayCountBasis, str]] = None,
        payment_business_days: Optional[str] = None,
        start_tenor: Optional[str] = None,
    ):
        super().__init__()
        self.instrument_tag = instrument_tag
        self.instrument_code = instrument_code
        self.start_date = start_date
        self.end_date = end_date
        self.tenor = tenor
        self.notional_ccy = notional_ccy
        self.notional_amount = notional_amount
        self.fixed_rate_percent = fixed_rate_percent
        self.payment_business_day_convention = payment_business_day_convention
        self.payment_roll_convention = payment_roll_convention
        self.year_basis = year_basis
        self.calendar = calendar
        self.interest_payment_frequency = interest_payment_frequency
        self.interest_calculation_method = interest_calculation_method
        self.payment_business_days = payment_business_days
        self.start_tenor = start_tenor
    def get_instrument_type(self):
        """Return the instrument-type tag used by the analytics backend."""
        return "TermDeposit"
    @property
    def payment_business_day_convention(self):
        """
        The method to adjust dates to a working day.
        The possible values are:
        - ModifiedFollowing (adjusts dates according to the Modified Following convention - next business day unless it goes
        into the next month,
        preceding is used in that case),
        - NextBusinessDay (adjusts dates according to the Following convention - Next Business Day),
        - PreviousBusinessDay (adjusts dates according to the Preceding convention - Previous Business Day),
        - NoMoving (does not adjust dates),
        - BbswModifiedFollowing (adjusts dates according to the BBSW Modified Following convention).
        Optional. In case an instrument code/style has been defined, value comes from bond reference data. Otherwise
        'ModifiedFollowing' is used.
        :return: enum BusinessDayConvention
        """
        return self._get_enum_parameter(BusinessDayConvention, "paymentBusinessDayConvention")
    @payment_business_day_convention.setter
    def payment_business_day_convention(self, value):
        self._set_enum_parameter(BusinessDayConvention, "paymentBusinessDayConvention", value)
    @property
    def payment_roll_convention(self):
        """
        Method to adjust payment dates when they fall at the end of the month (28th of February, 30th, 31st).
        The possible values are:
        - Last (For setting the calculated date to the last working day),
        - Same (For setting the calculated date to the same day . In this latter case, the date may be moved according to the date
        moving
        convention if it is a non-working day),
        - Last28 (For setting the calculated date to the last working day. 28FEB being always considered as the last working day),
        - Same28 (For setting the calculated date to the same day .28FEB being always considered as the last working day).
        Optional. In case an instrument code has been defined, value comes from bond reference data. Otherwise, 'SameDay' is used.
        :return: enum DateRollingConvention
        """
        return self._get_enum_parameter(DateRollingConvention, "paymentRollConvention")
    @payment_roll_convention.setter
    def payment_roll_convention(self, value):
        self._set_enum_parameter(DateRollingConvention, "paymentRollConvention", value)
    @property
    def year_basis(self):
        """
        The Day Count Basis method used to calculate the interest payments.
        Dcb_Actual_365 used by default.
        :return: enum DayCountBasis
        """
        return self._get_enum_parameter(DayCountBasis, "yearBasis")
    @year_basis.setter
    def year_basis(self, value):
        self._set_enum_parameter(DayCountBasis, "yearBasis", value)
    @property
    def calendar(self):
        """
        Calendar used to adjust deposit duration calculation.
        By default the calendar corresponding to notional currency is used.
        :return: str
        """
        return self._get_parameter("calendar")
    @calendar.setter
    def calendar(self, value):
        self._set_parameter("calendar", value)
    @property
    def end_date(self):
        """
        The maturity date of the term deposit contract.
        Mandatory.
        Either the endDate or the tenor must be provided.
        :return: str
        """
        return self._get_parameter("endDate")
    @end_date.setter
    def end_date(self, value):
        self._set_datetime_parameter("endDate", value)
    @property
    def fixed_rate_percent(self):
        """
        Fixed interest rate percent to be applied for notional by deal terms.
        E.g. "10" means 10%
        :return: float
        """
        return self._get_parameter("fixedRatePercent")
    @fixed_rate_percent.setter
    def fixed_rate_percent(self, value):
        self._set_parameter("fixedRatePercent", value)
    @property
    def instrument_code(self):
        """
        Code to define the term deposit instrument.
        For the moment, only RICs for CDs and Wholesales deposits are supported, with deposit code (ex:"EUR1MD=").
        :return: str
        """
        return self._get_parameter("instrumentCode")
    @instrument_code.setter
    def instrument_code(self, value):
        self._set_parameter("instrumentCode", value)
    @property
    def instrument_tag(self):
        """
        User defined string to identify the instrument.
        It can be used to link output results to the instrument definition.
        Only alphabetic, numeric and '- _.#=@' characters are supported.
        Optional.
        :return: str
        """
        return self._get_parameter("instrumentTag")
    @instrument_tag.setter
    def instrument_tag(self, value):
        self._set_parameter("instrumentTag", value)
    @property
    def notional_amount(self):
        """
        The notional amount of the term deposit at the start date.
        Optional.
        By default 1,000,000 is used.
        :return: float
        """
        return self._get_parameter("notionalAmount")
    @notional_amount.setter
    def notional_amount(self, value):
        self._set_parameter("notionalAmount", value)
    @property
    def notional_ccy(self):
        """
        The ISO code of the notional currency.
        Should be explicitly specified if InstrumentCode hasn't been specified.
        May be retrieved from reference data.
        :return: str
        """
        return self._get_parameter("notionalCcy")
    @notional_ccy.setter
    def notional_ccy(self, value):
        self._set_parameter("notionalCcy", value)
    @property
    def start_date(self):
        """
        The date the term deposit starts accruing interest. Its effective date.
        Optional. By default it is derived from the ValuationDate and the day to spot convention of the contract currency.
        :return: str
        """
        return self._get_parameter("startDate")
    @start_date.setter
    def start_date(self, value):
        self._set_datetime_parameter("startDate", value)
    @property
    def tenor(self):
        """
        The period code that represents the time between the start date and end date of the contract.
        Mandatory if instrumentCode is null.
        Either the endDate or the tenor must be provided.
        Sample value: 1M
        :return: str
        """
        return self._get_parameter("tenor")
    @tenor.setter
    def tenor(self, value):
        self._set_parameter("tenor", value)
    @property
    def interest_payment_frequency(self):
        """
        The interest payment frequency.
        :return: enum Frequency
        """
        return self._get_enum_parameter(Frequency, "interestPaymentFrequency")
    @interest_payment_frequency.setter
    def interest_payment_frequency(self, value):
        self._set_enum_parameter(Frequency, "interestPaymentFrequency", value)
    @property
    def interest_calculation_method(self):
        """
        The day count basis method used to calculate the interest payments.
        :return: enum DayCountBasis
        """
        return self._get_enum_parameter(DayCountBasis, "interestCalculationMethod")
    @interest_calculation_method.setter
    def interest_calculation_method(self, value):
        self._set_enum_parameter(DayCountBasis, "interestCalculationMethod", value)
    @property
    def payment_business_days(self):
        """
        Business days used to adjust payment dates.
        NOTE(review): exact semantics not documented upstream - confirm against the API.
        :return: str
        """
        return self._get_parameter("paymentBusinessDays")
    @payment_business_days.setter
    def payment_business_days(self, value):
        self._set_parameter("paymentBusinessDays", value)
    @property
    def start_tenor(self):
        """
        The tenor representing the start of the deposit (e.g. '1M').
        NOTE(review): not described in the class docstring - confirm against the API.
        :return: str
        """
        return self._get_parameter("startTenor")
@start_tenor.setter
def start_tenor(self, value):
self._set_parameter("startTenor", value) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ipa/financial_contracts/term_deposit/_term_deposit_definition.py | 0.958187 | 0.564999 | _term_deposit_definition.py | pypi |
from typing import Optional, Union
from ..._enums import PriceSide
from ..._object_definition import ObjectDefinition
from ....._types import OptDateTime
class PricingParameters(ObjectDefinition):
    """
    API endpoint for Financial Contract analytics,
    that returns calculations relevant to each contract type.
    Parameters
    ----------
    price_side : PriceSide or str, optional
        Price Side to consider when retrieving Market Data.
    income_tax_percent : float, optional
        Income tax percent. Stored as a plain attribute on the object (not via
        _set_parameter), so it is not part of the serialized request parameters.
    market_data_date : str or date or datetime or timedelta, optional
        The market data date for pricing.
        By default, the market_data_date date is the valuation_date or Today.
    report_ccy : str, optional
        The reporting currency code, expressed in iso 4217 alphabetical format (e.g.,
        'usd'). It is set for the fields ending with 'xxxinreportccy'. Optional. The
        default value is the notional currency.
    valuation_date : str or date or datetime or timedelta, optional
        The valuation date for pricing. If not set the valuation date is equal to
        market_data_date or Today. For assets that contains a settlementConvention, the
        default valuation date is equal to the settlementdate of the Asset that is
        usually the TradeDate+SettlementConvention.
    Examples
    --------
    >>> import refinitiv.data.content.ipa.financial_contracts as rdf
    >>> rdf.term_deposit.PricingParameters(valuation_date="2020-04-24")
    """
    # Backing attribute for income_tax_percent; intentionally kept out of the
    # parameter dictionary managed by ObjectDefinition.
    _income_tax_percent = None
    def __init__(
        self,
        price_side: Optional[Union[PriceSide, str]] = None,
        income_tax_percent: Optional[float] = None,
        market_data_date: "OptDateTime" = None,
        report_ccy: Optional[str] = None,
        valuation_date: "OptDateTime" = None,
    ):
        super().__init__()
        self.price_side = price_side
        self.income_tax_percent = income_tax_percent
        self.market_data_date = market_data_date
        self.report_ccy = report_ccy
        self.valuation_date = valuation_date
    @property
    def price_side(self):
        """
        Price Side to consider when retrieving Market Data.
        :return: enum PriceSide
        """
        return self._get_enum_parameter(PriceSide, "priceSide")
    @price_side.setter
    def price_side(self, value):
        self._set_enum_parameter(PriceSide, "priceSide", value)
    @property
    def income_tax_percent(self):
        """
        Income tax percent. Held on the instance only (plain attribute, not a
        serialized parameter).
        :return: float
        """
        return self._income_tax_percent
    @income_tax_percent.setter
    def income_tax_percent(self, value):
        self._income_tax_percent = value
    @property
    def market_data_date(self):
        """
        The market data date for pricing.
        By default, the marketDataDate date is the ValuationDate or Today.
        :return: str
        """
        return self._get_parameter("marketDataDate")
    @market_data_date.setter
    def market_data_date(self, value):
        self._set_datetime_parameter("marketDataDate", value)
    @property
    def report_ccy(self):
        """
        The reporting currency code, expressed in iso 4217 alphabetical format (e.g.,
        'usd'). it is set for the fields ending with 'xxxinreportccy'. optional. the
        default value is the notional currency.
        :return: str
        """
        return self._get_parameter("reportCcy")
    @report_ccy.setter
    def report_ccy(self, value):
        self._set_parameter("reportCcy", value)
    @property
    def valuation_date(self):
        """
        The date at which the instrument is valued. the value is expressed in iso 8601
        format: yyyy-mm-ddt[hh]:[mm]:[ss]z (e.g., '2021-01-01t00:00:00z'). by default,
        marketdatadate is used. if marketdatadate is not specified, the default value is
        today.
        :return: str
        """
        return self._get_parameter("valuationDate")
@valuation_date.setter
def valuation_date(self, value):
self._set_datetime_parameter("valuationDate", value) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ipa/financial_contracts/term_deposit/_term_deposit_pricing_parameters.py | 0.954795 | 0.389779 | _term_deposit_pricing_parameters.py | pypi |
from typing import Optional, TYPE_CHECKING, Union
from ._term_deposit_definition import TermDepositInstrumentDefinition
from ._term_deposit_pricing_parameters import PricingParameters
from .._base_definition import BaseDefinition
from ..._enums import (
BusinessDayConvention,
DateRollingConvention,
DayCountBasis,
Frequency,
)
from ....._tools import create_repr, try_copy_to_list
if TYPE_CHECKING:
from ....._types import ExtendedParams, OptStrStrs, OptDateTime
class Definition(BaseDefinition):
    """
    Defines a term deposit contract for the Financial Contract analytics
    endpoint, which returns calculations relevant to each contract type.

    Parameters
    ----------
    instrument_code : str, optional
        Code identifying the term deposit instrument. For the moment only RICs
        for CDs and Wholesale deposits are supported, e.g. "EUR1MD=".
    instrument_tag : str, optional
        User-defined string used to link output results to the instrument
        definition. Only alphabetic, numeric and '- _.#=@' characters are
        supported.
    start_date : str or date or datetime or timedelta, optional
        The date the term deposit starts accruing interest (its effective
        date). By default it is derived from the ValuationDate and the day to
        spot convention of the contract currency.
    end_date : str or date or datetime or timedelta, optional
        The maturity date of the term deposit contract. Either the end_date or
        the tenor must be provided.
    tenor : str, optional
        Period code representing the time between the start date and end date
        of the contract. Mandatory if instrument_code is None. Either the
        end_date or the tenor must be provided.
    notional_ccy : str, optional
        ISO code of the notional currency. Should be explicitly specified if
        instrument_code hasn't been specified; may be retrieved from reference
        data.
    notional_amount : float, optional
        The notional amount of the term deposit at the start date.
        By default 1,000,000 is used.
    fixed_rate_percent : float, optional
        Fixed interest rate percent applied to the notional by deal terms.
        Mandatory if instrument_code is None.
    payment_business_day_convention : BusinessDayConvention, optional
        Method used to adjust dates to a working day. By default
        'ModifiedFollowing'.
    payment_roll_convention : DateRollingConvention, optional
        Method used to adjust payment dates when they fall at the end of the
        month. By default 'Last'.
    year_basis : DayCountBasis or str, optional
        Day count basis method used to calculate the interest payments.
        By default 'Dcb_Actual_365'.
    calendar : str, optional
        Calendar used to adjust the deposit duration calculation. By default
        the calendar corresponding to the notional currency is used.
    fields : list of str, optional
        List of analytics that the quantitative analytic service will compute.
    pricing_parameters : PricingParameters, optional
        Pricing parameters applied to this instrument. If not provided at this
        level, parameters defined globally at the request level are used; if no
        pricing parameters are provided globally, default values apply.
    extended_params : dict, optional
        If necessary other parameters.
    interest_payment_frequency : Frequency or str, optional
        Frequency of the interest payment. The default value is zero.
    interest_calculation_method : DayCountBasis or str, optional
        Day count basis method used to calculate the interest payments.
        Mandatory if no instrumentCode is defined.
    payment_business_days : str, optional
        Comma-separated calendar code used to adjust dates (e.g., 'EMU' or
        'USA'). The default value is the calendar associated to the market
        conventions of NotionalCcy.
    start_tenor : str, optional
        Code indicating the period from a spot date to the startDate of the
        instrument (e.g., '1M'). Either startDate or startTenor can be
        specified, but not both.

    Methods
    -------
    get_data(session=session, on_response=on_response, async_mode=None)
        Returns a response to the data platform
    get_data_async(session=session, on_response=on_response, async_mode=None)
        Returns a response to the async data platform
    get_stream(session=session)
        Get stream quantitative analytic service subscription

    Examples
    --------
    >>> import refinitiv.data.content.ipa.financial_contracts as rdf
    >>> definition = rdf.term_deposit.Definition(
    ...     tenor="5Y",
    ...     notional_ccy="EUR",
    ...     fixed_rate_percent=11
    ...)
    >>> response = definition.get_data()

    Using get_stream

    >>> stream = definition.get_stream()
    >>> stream.open()
    """

    def __init__(
        self,
        instrument_code: Optional[str] = None,
        instrument_tag: Optional[str] = None,
        start_date: "OptDateTime" = None,
        end_date: "OptDateTime" = None,
        tenor: Optional[str] = None,
        notional_ccy: Optional[str] = None,
        notional_amount: Optional[float] = None,
        fixed_rate_percent: Optional[float] = None,
        payment_business_day_convention: Union[BusinessDayConvention, str] = None,
        payment_roll_convention: Union[DateRollingConvention, str] = None,
        year_basis: Union[DayCountBasis, str] = None,
        calendar: Optional[str] = None,
        fields: "OptStrStrs" = None,
        pricing_parameters: Optional[PricingParameters] = None,
        extended_params: "ExtendedParams" = None,
        interest_payment_frequency: Union[Frequency, str] = None,
        interest_calculation_method: Union[DayCountBasis, str] = None,
        payment_business_days: Optional[str] = None,
        start_tenor: Optional[str] = None,
    ):
        # Build the low-level instrument definition first, then hand it to the
        # shared BaseDefinition machinery together with the request options.
        inner_definition = TermDepositInstrumentDefinition(
            instrument_code=instrument_code,
            instrument_tag=instrument_tag,
            start_date=start_date,
            end_date=end_date,
            tenor=tenor,
            notional_ccy=notional_ccy,
            notional_amount=notional_amount,
            fixed_rate_percent=fixed_rate_percent,
            payment_business_day_convention=payment_business_day_convention,
            payment_roll_convention=payment_roll_convention,
            year_basis=year_basis,
            calendar=calendar,
            interest_payment_frequency=interest_payment_frequency,
            interest_calculation_method=interest_calculation_method,
            payment_business_days=payment_business_days,
            start_tenor=start_tenor,
        )
        super().__init__(
            definition=inner_definition,
            fields=try_copy_to_list(fields),  # defensive copy of the caller's list
            pricing_parameters=pricing_parameters,
            extended_params=extended_params,
        )

    def __repr__(self):
        return create_repr(self, middle_path="content.ipa.financial_contracts.term_deposit")
from typing import Optional, Union
from ....._types import OptDateTime
from ._enums import (
BuySell,
CallPut,
ExerciseStyle,
UnderlyingType,
)
from .._instrument_definition import InstrumentDefinition
class OptionDefinition(InstrumentDefinition):
    """
    Parameters
    ----------
    instrument_tag : str, optional
        User defined string to identify the instrument.It can be used to link output
        results to the instrument definition. Only alphabetic, numeric and '- _.#=@'
        characters are supported. Optional.
    start_date : str or date or datetime or timedelta, optional
        Start date of the option.
    end_date : str or date or datetime or timedelta, optional
        Expiry date of the option.
    buy_sell : BuySell or str, optional
        The side of the deal.
    call_put : CallPut or str, optional
        Tells if the option is a call or a put.
    exercise_style : ExerciseStyle or str, optional
        The option style based on its exercise restrictions.
    underlying_type : UnderlyingType or str, optional
        Underlying type of the option.
    strike : float, optional
        strike of the option
    """

    def __init__(
        self,
        instrument_tag: Optional[str] = None,
        start_date: "OptDateTime" = None,
        end_date: "OptDateTime" = None,
        buy_sell: Union[BuySell, str] = None,
        call_put: Union[CallPut, str] = None,
        exercise_style: Union[ExerciseStyle, str] = None,
        underlying_type: Union[UnderlyingType, str] = None,
        strike: Optional[float] = None,
        **kwargs,
    ) -> None:
        super().__init__(instrument_tag, **kwargs)
        # NOTE(review): instrument_tag is already passed to
        # InstrumentDefinition.__init__ above; this re-assignment is
        # presumably redundant — confirm against the base class.
        self.instrument_tag = instrument_tag
        self.start_date = start_date
        self.end_date = end_date
        self.buy_sell = buy_sell
        self.call_put = call_put
        self.exercise_style = exercise_style
        self.underlying_type = underlying_type
        self.strike = strike

    def get_instrument_type(self):
        """Return the instrument type name for this definition ('Option')."""
        return "Option"

    @property
    def buy_sell(self):
        """
        The side of the deal. Possible values:
        - Buy
        - Sell

        :return: enum BuySell
        """
        return self._get_enum_parameter(BuySell, "buySell")

    @buy_sell.setter
    def buy_sell(self, value):
        self._set_enum_parameter(BuySell, "buySell", value)

    @property
    def call_put(self):
        """
        Tells if the option is a call or a put. Possible values:
        - Call
        - Put

        :return: enum CallPut
        """
        return self._get_enum_parameter(CallPut, "callPut")

    @call_put.setter
    def call_put(self, value):
        self._set_enum_parameter(CallPut, "callPut", value)

    @property
    def exercise_style(self):
        """
        EURO or AMER

        :return: enum ExerciseStyle
        """
        return self._get_enum_parameter(ExerciseStyle, "exerciseStyle")

    @exercise_style.setter
    def exercise_style(self, value):
        self._set_enum_parameter(ExerciseStyle, "exerciseStyle", value)

    @property
    def underlying_type(self):
        """
        Underlying type of the option. Possible values:
        - Eti
        - Fx

        :return: enum UnderlyingType
        """
        return self._get_enum_parameter(UnderlyingType, "underlyingType")

    @underlying_type.setter
    def underlying_type(self, value):
        self._set_enum_parameter(UnderlyingType, "underlyingType", value)

    @property
    def end_date(self):
        """
        Expiry date of the option

        :return: str
        """
        return self._get_parameter("endDate")

    @end_date.setter
    def end_date(self, value):
        self._set_datetime_parameter("endDate", value)

    @property
    def start_date(self):
        """
        Start date of the option

        :return: str
        """
        return self._get_parameter("startDate")

    @start_date.setter
    def start_date(self, value):
        self._set_datetime_parameter("startDate", value)

    @property
    def strike(self):
        """
        strike of the option

        :return: float
        """
        return self._get_parameter("strike")

    @strike.setter
    def strike(self, value):
        self._set_parameter("strike", value)
from typing import Optional, List, Union
from ....._tools import try_copy_to_list
from ..._object_definition import ObjectDefinition
from ._enums import (
FxSwapCalculationMethod,
OptionVolatilityType,
PriceSide,
PricingModelType,
TimeStamp,
VolatilityModel,
)
from ..._models import (
BidAskMid,
InterpolationWeight,
PayoutScaling,
)
from ....._types import OptDateTime
class PricingParameters(ObjectDefinition):
"""
API endpoint for Financial Contract analytics,
that returns calculations relevant to each contract type.
Parameters
----------
atm_volatility_object : BidAskMid, optional
butterfly10_d_object : BidAskMid, optional
butterfly25_d_object : BidAskMid, optional
domestic_deposit_rate_percent_object : BidAskMid, optional
foreign_deposit_rate_percent_object : BidAskMid, optional
forward_points_object : BidAskMid, optional
fx_spot_object : BidAskMid, optional
fx_swap_calculation_method : FxSwapCalculationMethod or str, optional
The method used to calculate an outright price or deposit rates.
implied_volatility_object : BidAskMid, optional
interpolation_weight : InterpolationWeight, optional
option_price_side : PriceSide or str, optional
The quoted price side of the instrument. Optional. the default values for listed options are:
- ask: if buysell is set to 'buy',
- bid: if buysell is set to 'sell',
- last: if buysell is not provided. the default value for otc options is 'mid'.
option_time_stamp : TimeStamp, optional
The mode of the instrument's timestamp selection. Optional.the default value is 'default'.
payout_custom_dates : string, optional
The array of dates set by a user for the payout/volatility chart. optional.no
default value applies.
payout_scaling_interval : PayoutScaling, optional
price_side : PriceSide, optional
The quoted price side of the instrument.
pricing_model_type : PricingModelType, optional
The model type of the option pricing. Optional. the default value depends on the option type.
risk_reversal10_d_object : BidAskMid, optional
risk_reversal25_d_object : BidAskMid, optional
underlying_price_side : PriceSide or str, optional
The quoted price side of the underlying asset. Optional. the default values are:
- ask: if buysell is set to 'buy',
- bid: if buysell is set to 'sell',
- last: if buysell is not provided.
underlying_time_stamp : TimeStamp or str, optional
The mode of the underlying asset's timestamp selection. Optional.the default value is
'default'.
volatility_model : VolatilityModel, optional
The model used to build the volatility surface. the possible values are:
- sabr,
- cubicspline,
- svi,
- twinlognormal,
- vannavolga10d,
- vannavolga25d.
volatility_type : OptionVolatilityType or str, optional
The type of volatility for the option pricing. Optional. the default value is 'implied'.
compute_payout_chart : bool, optional
Define whether the payout chart must be computed or not
compute_volatility_payout : bool, optional
Define whether the volatility payout chart must be computed or not
cutoff_time : str, optional
The cutoff time
cutoff_time_zone : str, optional
The cutoff time zone
market_data_date : str or date or datetime or timedelta, optional
The date at which the market data is retrieved. the value is expressed in iso
8601 format: yyyy-mm-ddt[hh]:[mm]:[ss]z (e.g., '2021-01-01t00:00:00z'). it
should be less or equal tovaluationdate). optional. by
default,marketdatadateisvaluationdateor today.
market_value_in_deal_ccy : float, optional
The market value (premium) of the instrument. the value is expressed in the deal
currency. it is used to define optionprice and compute volatilitypercent. if
marketvalueindealccy is defined, optionpriceside and volatilitypercent are not
taken into account; marketvalueindealccy and marketvalueinreportccy cannot be
overriden at a time. optional. by default, it is equal to optionprice for listed
options or computed from volatilitypercent for otc options.
market_value_in_report_ccy : float, optional
The market value (premium) of the instrument. it is computed as
[marketvalueindealccy fxspot]. the value is expressed in the reporting
currency. it is used to define optionprice and computevolatilitypercent.
ifmarketvalueinreportccyis defined, optionpriceside and volatilitypercentinputs
are not taken into account; marketvalueindealccy and marketvalueinreportccy
cannot be overriden at a time. optional. by default, fxspot rate is retrieved
from the market data.
report_ccy : str, optional
The reporting currency code, expressed in iso 4217 alphabetical format (e.g.,
'usd'). it is set for the fields ending with 'xxxinreportccy'. optional. the
default value is the notional currency.
report_ccy_rate : float, optional
The rate of the reporting currency against the option currency. it can be used
to calculate optionprice and marketvalueindealccy if marketvalueinreportccy is
defined. optional.by default, it is retrieved from the market data.
risk_free_rate_percent : float, optional
A risk-free rate of the option currency used for the option pricing. optional.
by default, the value is retrieved from the market data.
simulate_exercise : bool, optional
Tells if payoff-linked cashflow should be returned. possible values:
- true
- false
underlying_price : float, optional
The price of the underlying asset. the value is expressed in the deal currency.
if underlyingprice is defined, underlyingpriceside is not taken into account.
optional. by default, the value is retrieved from the market data.
valuation_date : str or date or datetime or timedelta, optional
The date at which the instrument is valued. the value is expressed in iso 8601
format: yyyy-mm-ddt[hh]:[mm]:[ss]z (e.g., '2021-01-01t00:00:00z'). by default,
marketdatadate is used. if marketdatadate is not specified, the default value is
today.
volatility : float, optional
Volatility(without unity) to override and that will be used as pricing analysis
input to compute marketvalueindealccy. introduced due to bachelier model, for
more details please have a look at apqps-13558 optional. no override is applied
by default. note that if premium is defined, volatility is not taken into
account.
volatility_percent : float, optional
The degree of the underlying asset's price variations over a specified time
period, used for the option pricing. the value is expressed in percentages. it
is used to compute marketvalueindealccy.if marketvalueindealccy is defined,
volatilitypercent is not taken into account. optional. by default, it is
computed from marketvalueindealccy. if volsurface fails to return a volatility,
it defaults to '20'.
"""
    def __init__(
        self,
        atm_volatility_object: Optional[BidAskMid] = None,
        butterfly10_d_object: Optional[BidAskMid] = None,
        butterfly25_d_object: Optional[BidAskMid] = None,
        domestic_deposit_rate_percent_object: Optional[BidAskMid] = None,
        foreign_deposit_rate_percent_object: Optional[BidAskMid] = None,
        forward_points_object: Optional[BidAskMid] = None,
        fx_spot_object: Optional[BidAskMid] = None,
        fx_swap_calculation_method: Union[FxSwapCalculationMethod, str] = None,
        implied_volatility_object: Optional[BidAskMid] = None,
        interpolation_weight: Optional[InterpolationWeight] = None,
        option_price_side: Union[PriceSide, str] = None,
        option_time_stamp: Union[TimeStamp, str] = None,
        payout_custom_dates: Optional[List[str]] = None,
        payout_scaling_interval: Optional[PayoutScaling] = None,
        price_side: Union[PriceSide, str] = None,
        pricing_model_type: Union[PricingModelType, str] = None,
        risk_reversal10_d_object: Optional[BidAskMid] = None,
        risk_reversal25_d_object: Optional[BidAskMid] = None,
        underlying_price_side: Union[PriceSide, str] = None,
        underlying_time_stamp: Union[TimeStamp, str] = None,
        volatility_model: Union[VolatilityModel, str] = None,
        volatility_type: Union[OptionVolatilityType, str] = None,
        compute_payout_chart: Optional[bool] = None,
        compute_volatility_payout: Optional[bool] = None,
        cutoff_time: Optional[str] = None,
        cutoff_time_zone: Optional[str] = None,
        market_data_date: "OptDateTime" = None,
        market_value_in_deal_ccy: Optional[float] = None,
        market_value_in_report_ccy: Optional[float] = None,
        report_ccy: Optional[str] = None,
        report_ccy_rate: Optional[float] = None,
        risk_free_rate_percent: Optional[float] = None,
        simulate_exercise: Optional[bool] = None,
        underlying_price: Optional[float] = None,
        valuation_date: "OptDateTime" = None,
        volatility: Optional[float] = None,
        volatility_percent: Optional[float] = None,
    ) -> None:
        """
        Initialize the pricing parameters; see the class docstring for the
        meaning of each argument. Every assignment below goes through the
        corresponding property setter, which serializes the value under its
        camelCase parameter name.
        """
        super().__init__()
        self.atm_volatility_object = atm_volatility_object
        self.butterfly10_d_object = butterfly10_d_object
        self.butterfly25_d_object = butterfly25_d_object
        self.domestic_deposit_rate_percent_object = domestic_deposit_rate_percent_object
        self.foreign_deposit_rate_percent_object = foreign_deposit_rate_percent_object
        self.forward_points_object = forward_points_object
        self.fx_spot_object = fx_spot_object
        self.fx_swap_calculation_method = fx_swap_calculation_method
        self.implied_volatility_object = implied_volatility_object
        self.interpolation_weight = interpolation_weight
        self.option_price_side = option_price_side
        self.option_time_stamp = option_time_stamp
        # Defensive copy so later mutation of the caller's list has no effect.
        self.payout_custom_dates = try_copy_to_list(payout_custom_dates)
        self.payout_scaling_interval = payout_scaling_interval
        self.price_side = price_side
        self.pricing_model_type = pricing_model_type
        self.risk_reversal10_d_object = risk_reversal10_d_object
        self.risk_reversal25_d_object = risk_reversal25_d_object
        self.underlying_price_side = underlying_price_side
        self.underlying_time_stamp = underlying_time_stamp
        self.volatility_model = volatility_model
        self.volatility_type = volatility_type
        self.compute_payout_chart = compute_payout_chart
        self.compute_volatility_payout = compute_volatility_payout
        self.cutoff_time = cutoff_time
        self.cutoff_time_zone = cutoff_time_zone
        self.market_data_date = market_data_date
        self.market_value_in_deal_ccy = market_value_in_deal_ccy
        self.market_value_in_report_ccy = market_value_in_report_ccy
        self.report_ccy = report_ccy
        self.report_ccy_rate = report_ccy_rate
        self.risk_free_rate_percent = risk_free_rate_percent
        self.simulate_exercise = simulate_exercise
        self.underlying_price = underlying_price
        self.valuation_date = valuation_date
        self.volatility = volatility
        self.volatility_percent = volatility_percent
    @property
    def atm_volatility_object(self):
        """
        Bid/ask/mid quote for the at-the-money volatility
        (stored under 'atmVolatilityObject').

        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "atmVolatilityObject")

    @atm_volatility_object.setter
    def atm_volatility_object(self, value):
        self._set_object_parameter(BidAskMid, "atmVolatilityObject", value)

    @property
    def butterfly10_d_object(self):
        """
        Bid/ask/mid quote for the 10d butterfly
        (stored under 'butterfly10DObject').

        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "butterfly10DObject")

    @butterfly10_d_object.setter
    def butterfly10_d_object(self, value):
        self._set_object_parameter(BidAskMid, "butterfly10DObject", value)

    @property
    def butterfly25_d_object(self):
        """
        Bid/ask/mid quote for the 25d butterfly
        (stored under 'butterfly25DObject').

        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "butterfly25DObject")

    @butterfly25_d_object.setter
    def butterfly25_d_object(self, value):
        self._set_object_parameter(BidAskMid, "butterfly25DObject", value)

    @property
    def domestic_deposit_rate_percent_object(self):
        """
        Bid/ask/mid quote for the domestic deposit rate, in percent
        (stored under 'domesticDepositRatePercentObject').

        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "domesticDepositRatePercentObject")

    @domestic_deposit_rate_percent_object.setter
    def domestic_deposit_rate_percent_object(self, value):
        self._set_object_parameter(BidAskMid, "domesticDepositRatePercentObject", value)

    @property
    def foreign_deposit_rate_percent_object(self):
        """
        Bid/ask/mid quote for the foreign deposit rate, in percent
        (stored under 'foreignDepositRatePercentObject').

        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "foreignDepositRatePercentObject")

    @foreign_deposit_rate_percent_object.setter
    def foreign_deposit_rate_percent_object(self, value):
        self._set_object_parameter(BidAskMid, "foreignDepositRatePercentObject", value)

    @property
    def forward_points_object(self):
        """
        Bid/ask/mid quote for the forward points
        (stored under 'forwardPointsObject').

        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "forwardPointsObject")

    @forward_points_object.setter
    def forward_points_object(self, value):
        self._set_object_parameter(BidAskMid, "forwardPointsObject", value)

    @property
    def fx_spot_object(self):
        """
        Bid/ask/mid quote for the FX spot rate (stored under 'fxSpotObject').

        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "fxSpotObject")

    @fx_spot_object.setter
    def fx_spot_object(self, value):
        self._set_object_parameter(BidAskMid, "fxSpotObject", value)

    @property
    def fx_swap_calculation_method(self):
        """
        The method used to calculate an outright price or deposit rates. the possible
        values are:
        - fxswapimpliedfromdeposit: implied fx swap points are computed from deposit
          rates.
        - depositccy1impliedfromfxswap: currency 1 deposit rates are computed using swap
          points,
        - depositccy2impliedfromfxswap: currency 2 deposit rates are computed using swap
          points. the default value is 'depositccy2impliedfromfxswap'.

        :return: enum FxSwapCalculationMethod
        """
        return self._get_enum_parameter(FxSwapCalculationMethod, "fxSwapCalculationMethod")

    @fx_swap_calculation_method.setter
    def fx_swap_calculation_method(self, value):
        self._set_enum_parameter(FxSwapCalculationMethod, "fxSwapCalculationMethod", value)

    @property
    def implied_volatility_object(self):
        """
        Bid/ask/mid quote for the implied volatility
        (stored under 'impliedVolatilityObject').

        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "impliedVolatilityObject")

    @implied_volatility_object.setter
    def implied_volatility_object(self, value):
        self._set_object_parameter(BidAskMid, "impliedVolatilityObject", value)

    @property
    def interpolation_weight(self):
        """
        Interpolation weight settings (see the InterpolationWeight model).

        :return: object InterpolationWeight
        """
        return self._get_object_parameter(InterpolationWeight, "interpolationWeight")

    @interpolation_weight.setter
    def interpolation_weight(self, value):
        self._set_object_parameter(InterpolationWeight, "interpolationWeight", value)
    @property
    def option_price_side(self):
        """
        The quoted price side of the instrument.the possible values are:
        - bid,
        - ask,
        - mid,
        - last. optional. the default values for listed options are:
        - ask: if buysell is set to 'buy',
        - bid: if buysell is set to 'sell',
        - last: if buysell is not provided. the default value for otc options is 'mid'.

        :return: enum PriceSide
        """
        return self._get_enum_parameter(PriceSide, "optionPriceSide")

    @option_price_side.setter
    def option_price_side(self, value):
        self._set_enum_parameter(PriceSide, "optionPriceSide", value)

    @property
    def option_time_stamp(self):
        """
        The mode of the instrument's timestamp selection. the possible values are:
        - open: the opening value of valuationdate, or if it is not available, the close
          of the previous day is used,
        - close: the close value of valuationdate is used,
        - default: the latest snapshot is used when valuationdate is today, and the
          close price when valuationdate is in the past. optional.the default value is
          'default'.

        :return: enum TimeStamp
        """
        return self._get_enum_parameter(TimeStamp, "optionTimeStamp")

    @option_time_stamp.setter
    def option_time_stamp(self, value):
        self._set_enum_parameter(TimeStamp, "optionTimeStamp", value)

    @property
    def payout_custom_dates(self):
        """
        The array of dates set by a user for the payout/volatility chart. optional.no
        default value applies.

        :return: list string
        """
        return self._get_list_parameter(str, "payoutCustomDates")

    @payout_custom_dates.setter
    def payout_custom_dates(self, value):
        self._set_list_parameter(str, "payoutCustomDates", value)

    @property
    def payout_scaling_interval(self):
        """
        Scaling interval settings for the payout chart
        (see the PayoutScaling model).

        :return: object PayoutScaling
        """
        return self._get_object_parameter(PayoutScaling, "payoutScalingInterval")

    @payout_scaling_interval.setter
    def payout_scaling_interval(self, value):
        self._set_object_parameter(PayoutScaling, "payoutScalingInterval", value)

    @property
    def price_side(self):
        """
        The quoted price side of the instrument. the possible values are:
        - bid,
        - ask,
        - mid.

        :return: enum PriceSide
        """
        return self._get_enum_parameter(PriceSide, "priceSide")

    @price_side.setter
    def price_side(self, value):
        self._set_enum_parameter(PriceSide, "priceSide", value)
    @property
    def pricing_model_type(self):
        """
        The model type of the option pricing. the possible values are:
        - blackscholes
        - bachelier (available for commodity options including calendar spread options)
        - whaley
        - binomial
        - trinomial
        - localvolatility (applicable only for barrier options, cbbc options and binary
          options)
        - vannavolga (only applicable for fxbarrieroption, fxdigitaloption and
          fxtouchesoption) optional. the default value depends on the option type.

        :return: enum PricingModelType
        """
        return self._get_enum_parameter(PricingModelType, "pricingModelType")

    @pricing_model_type.setter
    def pricing_model_type(self, value):
        self._set_enum_parameter(PricingModelType, "pricingModelType", value)

    @property
    def risk_reversal10_d_object(self):
        """
        Bid/ask/mid quote for the 10d risk reversal
        (stored under 'riskReversal10DObject').

        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "riskReversal10DObject")

    @risk_reversal10_d_object.setter
    def risk_reversal10_d_object(self, value):
        self._set_object_parameter(BidAskMid, "riskReversal10DObject", value)

    @property
    def risk_reversal25_d_object(self):
        """
        Bid/ask/mid quote for the 25d risk reversal
        (stored under 'riskReversal25DObject').

        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "riskReversal25DObject")

    @risk_reversal25_d_object.setter
    def risk_reversal25_d_object(self, value):
        self._set_object_parameter(BidAskMid, "riskReversal25DObject", value)

    @property
    def underlying_price_side(self):
        """
        The quoted price side of the underlying asset.the possible values are:
        - bid,
        - ask,
        - mid,
        - last. optional. the default values are:
        - ask: if buysell is set to 'buy',
        - bid: if buysell is set to 'sell',
        - last: if buysell is not provided.

        :return: enum PriceSide
        """
        return self._get_enum_parameter(PriceSide, "underlyingPriceSide")

    @underlying_price_side.setter
    def underlying_price_side(self, value):
        self._set_enum_parameter(PriceSide, "underlyingPriceSide", value)

    @property
    def underlying_time_stamp(self):
        """
        The mode of the underlying asset's timestamp selection. the possible values are:
        - open: the opening value of valuationdate, or if it is not available, the close
          of the previous day is used,
        - close: the close value of valuationdate is used,
        - default: the latest snapshot is used when valuationdate is today, and the
          close price when valuationdate is in the past. optional.the default value is
          'default'.

        :return: enum TimeStamp
        """
        return self._get_enum_parameter(TimeStamp, "underlyingTimeStamp")

    @underlying_time_stamp.setter
    def underlying_time_stamp(self, value):
        self._set_enum_parameter(TimeStamp, "underlyingTimeStamp", value)

    @property
    def volatility_model(self):
        """
        The model used to build the volatility surface. the possible values are:
        - sabr,
        - cubicspline,
        - svi,
        - twinlognormal,
        - vannavolga10d,
        - vannavolga25d.

        :return: enum VolatilityModel
        """
        return self._get_enum_parameter(VolatilityModel, "volatilityModel")

    @volatility_model.setter
    def volatility_model(self, value):
        self._set_enum_parameter(VolatilityModel, "volatilityModel", value)
    @property
    def volatility_type(self):
        """
        The type of volatility for the option pricing. the possible values are:
        - implied: the volatility anticipated for the underlying asset for the remaining
          life of the option(implied by an option premium),
        - svisurface: the volatility computed from volsurface service,
        - historical: the volatility of the underlying asset during a period in the
          past. the value 'implied' is available only for listed options. if
          volatilitypercent is defined, volatilitytype is not taken into account.
          optional. the default value is 'implied'.

        :return: enum OptionVolatilityType
        """
        return self._get_enum_parameter(OptionVolatilityType, "volatilityType")

    @volatility_type.setter
    def volatility_type(self, value):
        self._set_enum_parameter(OptionVolatilityType, "volatilityType", value)

    @property
    def compute_payout_chart(self):
        """
        Define whether the payout chart must be computed or not

        :return: bool
        """
        return self._get_parameter("computePayoutChart")

    @compute_payout_chart.setter
    def compute_payout_chart(self, value):
        self._set_parameter("computePayoutChart", value)

    @property
    def compute_volatility_payout(self):
        """
        Define whether the volatility payout chart must be computed or not

        :return: bool
        """
        return self._get_parameter("computeVolatilityPayout")

    @compute_volatility_payout.setter
    def compute_volatility_payout(self, value):
        self._set_parameter("computeVolatilityPayout", value)

    @property
    def cutoff_time(self):
        """
        The cutoff time (presumably a time-of-day string used as the
        market-data cutoff; the expected format is not documented here —
        confirm with the service documentation).

        :return: str
        """
        return self._get_parameter("cutoffTime")

    @cutoff_time.setter
    def cutoff_time(self, value):
        self._set_parameter("cutoffTime", value)

    @property
    def cutoff_time_zone(self):
        """
        The cutoff time zone (applies to cutoff_time; the expected identifier
        format is not documented here — confirm with the service
        documentation).

        :return: str
        """
        return self._get_parameter("cutoffTimeZone")

    @cutoff_time_zone.setter
    def cutoff_time_zone(self, value):
        self._set_parameter("cutoffTimeZone", value)
@property
def market_data_date(self):
"""
The date at which the market data is retrieved. the value is expressed in iso
8601 format: yyyy-mm-ddt[hh]:[mm]:[ss]z (e.g., '2021-01-01t00:00:00z'). it
should be less or equal tovaluationdate). optional. by
default,marketdatadateisvaluationdateor today.
:return: str
"""
return self._get_parameter("marketDataDate")
@market_data_date.setter
def market_data_date(self, value):
self._set_datetime_parameter("marketDataDate", value)
@property
def market_value_in_deal_ccy(self):
"""
The market value (premium) of the instrument. the value is expressed in the deal
currency. it is used to define optionprice and compute volatilitypercent. if
marketvalueindealccy is defined, optionpriceside and volatilitypercent are not
taken into account; marketvalueindealccy and marketvalueinreportccy cannot be
overriden at a time. optional. by default, it is equal to optionprice for listed
options or computed from volatilitypercent for otc options.
:return: float
"""
return self._get_parameter("marketValueInDealCcy")
@market_value_in_deal_ccy.setter
def market_value_in_deal_ccy(self, value):
self._set_parameter("marketValueInDealCcy", value)
@property
def market_value_in_report_ccy(self):
"""
The market value (premium) of the instrument. it is computed as
[marketvalueindealccy fxspot]. the value is expressed in the reporting
currency. it is used to define optionprice and computevolatilitypercent.
ifmarketvalueinreportccyis defined, optionpriceside and volatilitypercentinputs
are not taken into account; marketvalueindealccy and marketvalueinreportccy
cannot be overriden at a time. optional. by default, fxspot rate is retrieved
from the market data.
:return: float
"""
return self._get_parameter("marketValueInReportCcy")
@market_value_in_report_ccy.setter
def market_value_in_report_ccy(self, value):
self._set_parameter("marketValueInReportCcy", value)
@property
def report_ccy(self):
"""
The reporting currency code, expressed in iso 4217 alphabetical format (e.g.,
'usd'). it is set for the fields ending with 'xxxinreportccy'. optional. the
default value is the notional currency.
:return: str
"""
return self._get_parameter("reportCcy")
@report_ccy.setter
def report_ccy(self, value):
self._set_parameter("reportCcy", value)
@property
def report_ccy_rate(self):
"""
The rate of the reporting currency against the option currency. it can be used
to calculate optionprice and marketvalueindealccy if marketvalueinreportccy is
defined. optional.by default, it is retrieved from the market data.
:return: float
"""
return self._get_parameter("reportCcyRate")
@report_ccy_rate.setter
def report_ccy_rate(self, value):
self._set_parameter("reportCcyRate", value)
@property
def risk_free_rate_percent(self):
"""
A risk-free rate of the option currency used for the option pricing. optional.
by default, the value is retrieved from the market data.
:return: float
"""
return self._get_parameter("riskFreeRatePercent")
@risk_free_rate_percent.setter
def risk_free_rate_percent(self, value):
self._set_parameter("riskFreeRatePercent", value)
@property
def simulate_exercise(self):
"""
Tells if payoff-linked cashflow should be returned. possible values:
- true
- false
:return: bool
"""
return self._get_parameter("simulateExercise")
@simulate_exercise.setter
def simulate_exercise(self, value):
self._set_parameter("simulateExercise", value)
@property
def underlying_price(self):
"""
The price of the underlying asset. the value is expressed in the deal currency.
if underlyingprice is defined, underlyingpriceside is not taken into account.
optional. by default, the value is retrieved from the market data.
:return: float
"""
return self._get_parameter("underlyingPrice")
@underlying_price.setter
def underlying_price(self, value):
self._set_parameter("underlyingPrice", value)
@property
def valuation_date(self):
"""
The date at which the instrument is valued. the value is expressed in iso 8601
format: yyyy-mm-ddt[hh]:[mm]:[ss]z (e.g., '2021-01-01t00:00:00z'). by default,
marketdatadate is used. if marketdatadate is not specified, the default value is
today.
:return: str
"""
return self._get_parameter("valuationDate")
@valuation_date.setter
def valuation_date(self, value):
self._set_datetime_parameter("valuationDate", value)
@property
def volatility(self):
"""
Volatility(without unity) to override and that will be used as pricing analysis
input to compute marketvalueindealccy. introduced due to bachelier model, for
more details please have a look at apqps-13558 optional. no override is applied
by default. note that if premium is defined, volatility is not taken into
account.
:return: float
"""
return self._get_parameter("volatility")
@volatility.setter
def volatility(self, value):
self._set_parameter("volatility", value)
    @property
    def volatility_percent(self):
        """
        The degree of the underlying asset's price variations over a specified
        time period, used for the option pricing. The value is expressed in
        percentages and is used to compute marketValueInDealCcy. If
        marketValueInDealCcy is defined, volatilityPercent is not taken into
        account. Optional. By default it is computed from marketValueInDealCcy;
        if VolSurface fails to return a volatility, it defaults to '20'.
        :return: float
        """
        return self._get_parameter("volatilityPercent")
    @volatility_percent.setter
    def volatility_percent(self, value):
        self._set_parameter("volatilityPercent", value) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ipa/financial_contracts/option/_option_pricing_parameters.py | 0.911214 | 0.574156 | _option_pricing_parameters.py | pypi |
# Public API of the option financial-contracts package. Every name listed in
# __all__ is re-exported from the private submodules imported below.
__all__ = (
    "AverageType",
    "BarrierMode",
    "BarrierStyle",
    "BidAskMid",
    "BinaryType",
    "BuySell",
    "CallPut",
    "DayWeight",
    "Definition",
    "DoubleBinaryType",
    "EtiBarrierDefinition",
    "EtiBinaryDefinition",
    "EtiCbbcDefinition",
    "EtiDoubleBarriersDefinition",
    "EtiFixingInfo",
    "EtiUnderlyingDefinition",
    "ExerciseStyle",
    "FixingFrequency",
    "FxAverageInfo",
    "FxBarrierDefinition",
    "FxBinaryDefinition",
    "FxBinaryType",
    "FxDoubleBarrierDefinition",
    "FxDoubleBarrierInfo",
    "FxDoubleBinaryDefinition",
    "FxDualCurrencyDefinition",
    "FxForwardStart",
    "FxSwapCalculationMethod",
    "FxUnderlyingDefinition",
    "InOrOut",
    "InputFlow",
    "InterpolationWeight",
    "OptionVolatilityType",
    "PayoutScaling",
    "PremiumSettlementType",
    "PriceSide",
    "PricingModelType",
    "PricingParameters",
    "SettlementType",
    "Status",
    "TimeStamp",
    "UnderlyingType",
    "UpOrDown",
    "VolatilityModel",
    "VolatilityType",
)
# Single consolidated import from ..._models: the original file imported
# DayWeight twice (once alone, once in this group), which is a harmless but
# lint-flagged redefinition (pyflakes F811).
from ..._models import (
    BidAskMid,
    DayWeight,
    InputFlow,
    InterpolationWeight,
    PayoutScaling,
)
from ._definition import Definition
from ._enums import (
    AverageType,
    BarrierMode,
    BarrierStyle,
    BinaryType,
    BuySell,
    CallPut,
    DoubleBinaryType,
    ExerciseStyle,
    FixingFrequency,
    FxBinaryType,
    FxSwapCalculationMethod,
    InOrOut,
    OptionVolatilityType,
    PremiumSettlementType,
    PriceSide,
    PricingModelType,
    SettlementType,
    Status,
    TimeStamp,
    UnderlyingType,
    UpOrDown,
    VolatilityModel,
    VolatilityType,
)
from ._eti import (
    EtiBarrierDefinition,
    EtiBinaryDefinition,
    EtiCbbcDefinition,
    EtiDoubleBarriersDefinition,
    EtiFixingInfo,
    EtiUnderlyingDefinition,
)
from ._fx import (
    FxAverageInfo,
    FxBarrierDefinition,
    FxBinaryDefinition,
    FxDoubleBarrierDefinition,
    FxDoubleBarrierInfo,
    FxDoubleBinaryDefinition,
    FxDualCurrencyDefinition,
    FxForwardStart,
    FxUnderlyingDefinition,
)
from ._option_pricing_parameters import PricingParameters
from typing import Optional, Union, TYPE_CHECKING
from ._enums import BuySell, CallPut, ExerciseStyle, UnderlyingType, SettlementType
from ._eti import (
EtiUnderlyingDefinition,
EtiBinaryDefinition,
EtiBarrierDefinition,
EtiCbbcDefinition,
EtiDoubleBarriersDefinition,
EtiFixingInfo,
)
from ._fx import (
FxUnderlyingDefinition,
FxBinaryDefinition,
FxBarrierDefinition,
FxDualCurrencyDefinition,
FxAverageInfo,
FxDoubleBarrierDefinition,
FxDoubleBinaryDefinition,
FxForwardStart,
)
from ._option_instrument_definition import OptionInstrumentDefinition
from ._option_pricing_parameters import PricingParameters
from .._base_definition import BaseDefinition
from ..._models import InputFlow
from ....._tools import validate_types, try_copy_to_list
if TYPE_CHECKING:
from ....._types import ExtendedParams, OptStrStrs, OptDateTime
from ...financial_contracts._stream_facade import Stream
from ....._core.session import Session
class Definition(BaseDefinition):
    """
    Option contract definition (FX or ETI underlying).

    Parameters
    ----------
    instrument_tag : str, optional
        User defined string to identify the instrument. It can be used to link
        output results to the instrument definition. Only alphabetic, numeric
        and '- _.#=@' characters are supported. Optional.
    end_date : str or date or datetime or timedelta, optional
        Expiry date of the option
    buy_sell : BuySell or str, optional
        The side of the deal.
    call_put : CallPut or str, optional
        Tells if the option is a call or a put.
    exercise_style : ExerciseStyle or str, optional
        EURO or AMER
    underlying_type : UnderlyingType, optional
        Underlying type of the option.
    strike : float, optional
        strike of the option
    tenor : str, optional
        tenor of the option
    notional_ccy : str, optional
        Currency of the notional amount. If the option is a EURGBP Call option,
        notional_ccy can be expressed in EUR OR GBP
    notional_amount : float, optional
        The notional amount of currency. If the option is a EURGBP Call option,
        amount of EUR or GBP of the contract
    asian_definition : FxOptionAverageInfo, EtiOptionFixingInfo, optional
        Fixing details for asian options
    barrier_definition : FxOptionBarrierDefinition, EtiOptionBarrierDefinition, optional
        Details for barrier option.
    binary_definition : FxOptionBinaryDefinition, EtiOptionBinaryDefinition, optional
        Details for binary option.
    double_barrier_definition : FxOptionDoubleBarrierDefinition, optional
        Details for double barriers option.
    double_binary_definition : FxOptionDoubleBinaryDefinition, optional
        Details for double binary option.
    dual_currency_definition : FxDualCurrencyDefinition, optional
        Details for dual currency option.
    forward_start_definition : FxOptionForwardStart, optional
        Details for Forward Start option.
    underlying_definition : FxUnderlyingDefinition, EtiUnderlyingDefinition, optional
        Details of the underlying. Can be used to override some data of the underlying.
    delivery_date : str or date or datetime or timedelta, optional
        Expiry date of the option
    instrument_code : str, optional
        An option RIC that is used to retrieve the description of the
        EtiOptionDefinition contract. Optional. If null, the instrument_code of
        underlying_definition must be provided.
    cbbc_definition : EtiOptionCbbcDefinition, optional
        Details for CBBC (Call Bear/Bull Contract) option.
    double_barriers_definition : EtiOptionDoubleBarriersDefinition, optional
        Details for double barriers option.
    deal_contract : int, optional
        deal_contract. It is the number of contracts bought or sold in the deal.
    end_date_time : str or date or datetime or timedelta, optional
        Expiry date time of the option
    lot_size : float, optional
        The lot size. It is the number of options bought or sold in one transaction.
    offset : int, optional
        offset. The offset in minutes between the time UTC and the time of the exchange
        where the contract is traded.
    fields: list of str, optional
        Contains the list of Analytics that the quantitative analytic service will
        compute.
    pricing_parameters : PricingParameters, optional
        The pricing parameters to apply to this instrument. Optional. If pricing
        parameters are not provided at this level parameters defined globally at the
        request level are used. If no pricing parameters are provided globally default
        values apply.
    extended_params : dict, optional
        If necessary other parameters

    Methods
    -------
    get_data(session=session, on_response=on_response)
        Returns a response to the data platform
    get_stream(session=session)
        Get stream quantitative analytic service subscription

    Examples
    --------
    >>> import refinitiv.data.content.ipa.financial_contracts as rdf
    >>> definition = rdf.option.Definition(
    ...     instrument_code="FCHI560000L1.p",
    ...     underlying_type=rdf.option.UnderlyingType.ETI,
    ...     fields=[
    ...         "MarketValueInDealCcy",
    ...         "DeltaPercent",
    ...         "GammaPercent",
    ...         "RhoPercent",
    ...         "ThetaPercent",
    ...         "VegaPercent",
    ...         "ErrorCode",
    ...         "ErrorMessage",
    ...     ],
    ... )
    >>> response = definition.get_data()

    Using get_stream

    >>> response = definition.get_stream()
    """

    def __init__(
        self,
        asian_definition: Union[EtiFixingInfo, FxAverageInfo] = None,
        barrier_definition: Union[FxBarrierDefinition, EtiBarrierDefinition] = None,
        binary_definition: Union[FxBinaryDefinition, EtiBinaryDefinition] = None,
        buy_sell: Union[BuySell, str] = None,
        call_put: Union[CallPut, str] = None,
        cbbc_definition: Optional[EtiCbbcDefinition] = None,
        deal_contract: Optional[int] = None,
        delivery_date: "OptDateTime" = None,
        double_barrier_definition: Optional[FxDoubleBarrierDefinition] = None,
        double_barriers_definition: Optional[EtiDoubleBarriersDefinition] = None,
        double_binary_definition: Optional[FxDoubleBinaryDefinition] = None,
        dual_currency_definition: Optional[FxDualCurrencyDefinition] = None,
        end_date: "OptDateTime" = None,
        end_date_time: "OptDateTime" = None,
        exercise_style: Union[ExerciseStyle, str] = None,
        forward_start_definition: Optional[FxForwardStart] = None,
        instrument_code: Optional[str] = None,
        instrument_tag: Optional[str] = None,
        lot_size: Optional[float] = None,
        notional_amount: Optional[float] = None,
        notional_ccy: Optional[str] = None,
        payments: Optional[InputFlow] = None,
        settlement_ccy: Optional[str] = None,
        settlement_type: Union[SettlementType, str] = None,
        start_date: "OptDateTime" = None,
        strike: Optional[float] = None,
        tenor: Optional[str] = None,
        time_zone_offset: Optional[int] = None,
        underlying_definition: Union[FxUnderlyingDefinition, EtiUnderlyingDefinition] = None,
        underlying_type: Union[UnderlyingType, str] = None,
        fields: "OptStrStrs" = None,
        pricing_parameters: Optional[PricingParameters] = None,
        extended_params: "ExtendedParams" = None,
    ):
        # Fail fast on a non-int contract count before building the request.
        validate_types(deal_contract, [int, type(None)], "deal_contract")
        # Defensive copy so a caller-owned list is not mutated later.
        fields = try_copy_to_list(fields)
        # All contract attributes are forwarded unchanged to the wire-format
        # definition object; this class only adds request plumbing on top.
        definition = OptionInstrumentDefinition(
            asian_definition=asian_definition,
            barrier_definition=barrier_definition,
            binary_definition=binary_definition,
            buy_sell=buy_sell,
            call_put=call_put,
            cbbc_definition=cbbc_definition,
            deal_contract=deal_contract,
            delivery_date=delivery_date,
            double_barrier_definition=double_barrier_definition,
            double_barriers_definition=double_barriers_definition,
            double_binary_definition=double_binary_definition,
            dual_currency_definition=dual_currency_definition,
            end_date=end_date,
            end_date_time=end_date_time,
            exercise_style=exercise_style,
            forward_start_definition=forward_start_definition,
            instrument_code=instrument_code,
            instrument_tag=instrument_tag,
            lot_size=lot_size,
            notional_amount=notional_amount,
            notional_ccy=notional_ccy,
            payments=payments,
            settlement_ccy=settlement_ccy,
            settlement_type=settlement_type,
            start_date=start_date,
            strike=strike,
            tenor=tenor,
            time_zone_offset=time_zone_offset,
            underlying_definition=underlying_definition,
            underlying_type=underlying_type,
        )
        super().__init__(
            definition=definition,
            fields=fields,
            pricing_parameters=pricing_parameters,
            extended_params=extended_params,
        )

    def get_stream(self, session: Optional["Session"] = None) -> "Stream":
        # A stream subscription needs an explicit field list. When the caller
        # did not provide one, resolve it with a one-shot data request and
        # reuse the returned header names as the subscribed fields.
        fields = self._kwargs.get("fields")
        if fields is None:
            response = self.get_data(session=session)
            if isinstance(response.data.raw, dict) and "headers" in response.data.raw:
                fields = [item.get("name", "") for item in response.data.raw["headers"]]
            # Cache the resolved list so subsequent get_stream calls skip the
            # extra snapshot request.
            self._kwargs["fields"] = fields
        return super().get_stream(session=session) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ipa/financial_contracts/option/_definition.py | 0.919018 | 0.308281 | _definition.py | pypi |
from typing import Optional, Union
from refinitiv.data._types import OptDateTime
from .._base import Info
from .._enums import (
AverageType,
FixingFrequency,
)
class EtiFixingInfo(Info):
    """
    Fixing details for asian (averaging) ETI options.

    Parameters
    ----------
    average_type : AverageType or str, optional
        The type of average used to compute.
    fixing_frequency : FixingFrequency or str, optional
        The fixing's frequency.
    average_so_far : float, optional
        The value of the average_type
    fixing_calendar : str, optional
        The calendar of the underlying's currency.
    fixing_end_date : str or date or datetime or timedelta, optional
        The end date of the fixing period. Should be less or equal to the expiry.
    fixing_start_date : str or date or datetime or timedelta, optional
        The beginning date of the fixing period.
    include_holidays : bool, optional
        Include the holidays in the list of fixings
    include_week_ends : bool, optional
        Include the week-ends in the list of fixings
    """

    def __init__(
        self,
        average_type: Union[AverageType, str] = None,
        fixing_frequency: Union[FixingFrequency, str] = None,
        average_so_far: Optional[float] = None,
        fixing_calendar: Optional[str] = None,
        fixing_end_date: "OptDateTime" = None,
        fixing_start_date: "OptDateTime" = None,
        include_holidays: Optional[bool] = None,
        include_week_ends: Optional[bool] = None,
    ) -> None:
        super().__init__()
        # Each assignment goes through the matching property setter below,
        # which serializes the value into the underlying parameter dict.
        self.average_type = average_type
        self.fixing_frequency = fixing_frequency
        self.average_so_far = average_so_far
        self.fixing_calendar = fixing_calendar
        self.fixing_end_date = fixing_end_date
        self.fixing_start_date = fixing_start_date
        self.include_holidays = include_holidays
        self.include_week_ends = include_week_ends

    @property
    def average_type(self):
        """
        The type of average used to compute. Possible values:
        - ArithmeticRate
        - ArithmeticStrike
        - GeometricRate
        - GeometricStrike
        :return: enum AverageType
        """
        return self._get_enum_parameter(AverageType, "averageType")

    @average_type.setter
    def average_type(self, value):
        self._set_enum_parameter(AverageType, "averageType", value)

    @property
    def fixing_frequency(self):
        """
        The fixing's frequency. Possible values:
        - Daily
        - Weekly
        - BiWeekly
        - Monthly
        - Quarterly
        - SemiAnnual
        - Annual
        :return: enum FixingFrequency
        """
        return self._get_enum_parameter(FixingFrequency, "fixingFrequency")

    @fixing_frequency.setter
    def fixing_frequency(self, value):
        self._set_enum_parameter(FixingFrequency, "fixingFrequency", value)

    @property
    def average_so_far(self):
        """
        The value of the average_type
        :return: float
        """
        return self._get_parameter("averageSoFar")

    @average_so_far.setter
    def average_so_far(self, value):
        self._set_parameter("averageSoFar", value)

    @property
    def fixing_calendar(self):
        """
        The calendar of the underlying's currency.
        :return: str
        """
        return self._get_parameter("fixingCalendar")

    @fixing_calendar.setter
    def fixing_calendar(self, value):
        self._set_parameter("fixingCalendar", value)

    @property
    def fixing_end_date(self):
        """
        The end date of the fixing period. Should be less or equal to the expiry.
        :return: str
        """
        return self._get_parameter("fixingEndDate")

    @fixing_end_date.setter
    def fixing_end_date(self, value):
        # Normalizes date/datetime/timedelta/str inputs to the service format.
        self._set_datetime_parameter("fixingEndDate", value)

    @property
    def fixing_start_date(self):
        """
        The beginning date of the fixing period.
        :return: str
        """
        return self._get_parameter("fixingStartDate")

    @fixing_start_date.setter
    def fixing_start_date(self, value):
        # Normalizes date/datetime/timedelta/str inputs to the service format.
        self._set_datetime_parameter("fixingStartDate", value)

    @property
    def include_holidays(self):
        """
        Include the holidays in the list of fixings
        :return: bool
        """
        return self._get_parameter("includeHolidays")

    @include_holidays.setter
    def include_holidays(self, value):
        self._set_parameter("includeHolidays", value)

    @property
    def include_week_ends(self):
        """
        Include the week-ends in the list of fixings
        :return: bool
        """
        return self._get_parameter("includeWeekEnds")

    @include_week_ends.setter
    def include_week_ends(self, value):
        self._set_parameter("includeWeekEnds", value) | /refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ipa/financial_contracts/option/_eti/_eti_fixing_info.py | 0.959039 | 0.428353 | _eti_fixing_info.py | pypi |
from typing import Optional, Union
from ......_types import OptDateTime
from .._base import UnderlyingDefinition
from ..._instrument_definition import InstrumentDefinition
from .._enums import (
BuySell,
CallPut,
ExerciseStyle,
UnderlyingType,
)
from ._eti_barrier_definition import EtiBarrierDefinition
from ._eti_binary_definition import EtiBinaryDefinition
from ._eti_cbbc_definition import EtiCbbcDefinition
from ._eti_double_barriers_definition import EtiDoubleBarriersDefinition
from ._eti_fixing_info import EtiFixingInfo
from ._eti_underlying_definition import EtiUnderlyingDefinition
class EtiDefinition(InstrumentDefinition):
"""
Parameters
----------
instrument_tag : str, optional
User defined string to identify the instrument.It can be used to link output
results to the instrument definition. Only alphabetic, numeric and '- _.#=@'
characters are supported. Optional.
instrument_code : str, optional
An option RIC that is used to retrieve the description of the
EtiOptionDefinition contract. Optional.If null, the instrument_code of
underlying_definition must be provided.
end_date : str or date or datetime or timedelta, optional
Expiry date of the option
asian_definition : EtiOptionFixingInfo, optional
Fixing details for asian options
barrier_definition : EtiOptionBarrierDefinition, optional
Details for barrier option.
binary_definition : EtiOptionBinaryDefinition, optional
Details for binary option.
buy_sell : BuySell or str, optional
The side of the deal.
call_put : CallPut or str, optional
Tells if the option is a call or a put.
cbbc_definition : EtiOptionCbbcDefinition, optional
Details for CBBC (Call Bear/Bull Contract) option.
double_barriers_definition : EtiOptionDoubleBarriersDefinition, optional
Details for double barriers option.
exercise_style : ExerciseStyle or str, optional
EURO or AMER
underlying_definition : EtiUnderlyingDefinition, optional
Details of the underlying. Can be used to override some data of the underlying.
underlying_type : UnderlyingType or str, optional
Underlying type of the option.
deal_contract : int, optional
deal_contract. It is the number of contracts bought or sold in the deal.
end_date_time : str or date or datetime or timedelta, optional
Expiry date time of the option
lot_size : float, optional
The lot size. It is the number of options bought or sold in one transaction.
offset : int, optional
offset. The offset in minutes between the time UTC and the time of the exchange
where the contract is traded.
strike : float, optional
strike of the option
"""
def __init__(
self,
instrument_tag: Optional[str] = None,
instrument_code: Optional[str] = None,
start_date: "OptDateTime" = None,
end_date: "OptDateTime" = None,
asian_definition: Optional[EtiFixingInfo] = None,
barrier_definition: Optional[EtiBarrierDefinition] = None,
binary_definition: Optional[EtiBinaryDefinition] = None,
buy_sell: Union[BuySell, str] = None,
call_put: Union[CallPut, str] = None,
cbbc_definition: Optional[EtiCbbcDefinition] = None,
double_barriers_definition: Optional[EtiDoubleBarriersDefinition] = None,
exercise_style: Union[ExerciseStyle, str] = None,
underlying_definition: Optional[EtiUnderlyingDefinition] = None,
underlying_type: Union[UnderlyingType, str] = None,
deal_contract: Optional[int] = None,
end_date_time: "OptDateTime" = None,
lot_size: Optional[float] = None,
strike: Optional[float] = None,
time_zone_offset: Optional[int] = None,
**kwargs,
) -> None:
super().__init__(instrument_tag, **kwargs)
self.instrument_tag = instrument_tag
self.instrument_code = instrument_code
self.start_date = start_date
self.end_date = end_date
self.asian_definition = asian_definition
self.barrier_definition = barrier_definition
self.binary_definition = binary_definition
self.buy_sell = buy_sell
self.call_put = call_put
self.cbbc_definition = cbbc_definition
self.double_barriers_definition = double_barriers_definition
self.exercise_style = exercise_style
self.underlying_definition = underlying_definition
self.underlying_type = underlying_type
self.deal_contract = deal_contract
self.end_date_time = end_date_time
self.lot_size = lot_size
self.strike = strike
self.time_zone_offset = time_zone_offset
@property
def asian_definition(self):
"""
:return: object EtiOptionFixingInfo
"""
return self._get_object_parameter(EtiFixingInfo, "asianDefinition")
@asian_definition.setter
def asian_definition(self, value):
self._set_object_parameter(EtiFixingInfo, "asianDefinition", value)
@property
def barrier_definition(self):
"""
:return: object EtiOptionBarrierDefinition
"""
return self._get_object_parameter(EtiBarrierDefinition, "barrierDefinition")
@barrier_definition.setter
def barrier_definition(self, value):
self._set_object_parameter(EtiBarrierDefinition, "barrierDefinition", value)
@property
def binary_definition(self):
"""
:return: object EtiOptionBinaryDefinition
"""
return self._get_object_parameter(EtiBinaryDefinition, "binaryDefinition")
@binary_definition.setter
def binary_definition(self, value):
self._set_object_parameter(EtiBinaryDefinition, "binaryDefinition", value)
@property
def buy_sell(self):
"""
The indicator of the deal side. the possible values are:
- buy: buying the option,
- sell: selling/writing the option. the output amounts calculated with taking
buysell into consideration are returned with a reversed sign when the value
'sell' is used. optional. the default value is 'buy'.
:return: enum BuySell
"""
return self._get_enum_parameter(BuySell, "buySell")
@buy_sell.setter
def buy_sell(self, value):
self._set_enum_parameter(BuySell, "buySell", value)
@property
def call_put(self):
"""
The indicator if the option is a call or a put. the possible values are:
- call: the right to buy the underlying asset,
- put: the right to sell the underlying asset. optional. if instrumentcode of
listed eti option is defined, the value comes from the instrument reference
data.the default value is 'call' for otc eti options and fx options.
:return: enum CallPut
"""
return self._get_enum_parameter(CallPut, "callPut")
@call_put.setter
def call_put(self, value):
self._set_enum_parameter(CallPut, "callPut", value)
@property
def cbbc_definition(self):
"""
:return: object EtiOptionCbbcDefinition
"""
return self._get_object_parameter(EtiCbbcDefinition, "cbbcDefinition")
@cbbc_definition.setter
def cbbc_definition(self, value):
self._set_object_parameter(EtiCbbcDefinition, "cbbcDefinition", value)
@property
def double_barriers_definition(self):
"""
:return: object EtiOptionDoubleBarriersDefinition
"""
return self._get_object_parameter(EtiDoubleBarriersDefinition, "doubleBarriersDefinition")
@double_barriers_definition.setter
def double_barriers_definition(self, value):
self._set_object_parameter(EtiDoubleBarriersDefinition, "doubleBarriersDefinition", value)
@property
def exercise_style(self):
"""
The option style based on its exercise restrictions. the possible values are:
- amer: the owner has the right to exercise on any date before the option
expires,
- euro: the owner has the right to exercise only on enddate,
- berm: the owner has the right to exercise on any of several specified dates
before the option expires. all exercise styles may not apply to certain option
types. optional. if instrumentcode of listed eti option is defined, the value
comes from the instrument reference data. the default value is 'euro' for otc
eti options and fx options.
:return: enum ExerciseStyle
"""
return self._get_enum_parameter(ExerciseStyle, "exerciseStyle")
@exercise_style.setter
def exercise_style(self, value):
self._set_enum_parameter(ExerciseStyle, "exerciseStyle", value)
@property
def underlying_definition(self):
"""
:return: object EtiUnderlyingDefinition
"""
return self._get_object_parameter(UnderlyingDefinition, "underlyingDefinition")
@underlying_definition.setter
def underlying_definition(self, value):
self._set_object_parameter(UnderlyingDefinition, "underlyingDefinition", value)
@property
def underlying_type(self):
"""
The type of the option based on the underlying asset. the possible values are:
- eti: eti(exchanged traded instruments) options,
- fx: fx options. mandatory. no default value applies.
:return: enum UnderlyingType
"""
return self._get_enum_parameter(UnderlyingType, "underlyingType")
@underlying_type.setter
def underlying_type(self, value):
self._set_enum_parameter(UnderlyingType, "underlyingType", value)
@property
def deal_contract(self):
"""
The number of contracts bought or sold in the deal. optional.the default value
is '1'.
:return: int
"""
return self._get_parameter("dealContract")
@deal_contract.setter
def deal_contract(self, value):
self._set_parameter("dealContract", value)
@property
def end_date(self):
"""
The maturity or expiry date of the instrument. the value is expressed in iso
8601 format: yyyy-mm-ddt[hh]:[mm]:[ss]z (e.g., '2021-01-01t00:00:00z').
optional. mandatory for otc eti options and fx options(if tenor is not defined).
if instrumentcode of listed eti option is defined, the value comes from the
instrument reference data.
:return: str
"""
return self._get_parameter("endDate")
@end_date.setter
def end_date(self, value):
self._set_datetime_parameter("endDate", value)
@property
def end_date_time(self):
"""
The expiry date and time of the instrument at the exchange where it is traded.
the value is expressed in iso 8601 format: yyyy-mm-ddt[hh]:[mm]:[ss]z (e.g.,
'2021-01-01t00:00:00z'). optional. no default value applies.
:return: str
"""
return self._get_parameter("endDateTime")
@end_date_time.setter
def end_date_time(self, value):
self._set_datetime_parameter("endDateTime", value)
@property
def instrument_code(self):
"""
The code (an option ric) used to define the instrument. optional. mandatory for
listed eti options. for otc eti options instrumentcode of the underlying asset
must be provided. no default value applies.
:return: str
"""
return self._get_parameter("instrumentCode")
@instrument_code.setter
def instrument_code(self, value):
self._set_parameter("instrumentCode", value)
@property
def instrument_tag(self):
"""
User defined string to identify the instrument.it can be used to link output
results to the instrument definition. only alphabetic, numeric and '- _.#=@'
characters are supported. optional.
:return: str
"""
return self._get_parameter("instrumentTag")
@instrument_tag.setter
def instrument_tag(self, value):
self._set_parameter("instrumentTag", value)
@property
def lot_size(self):
"""
The number of the underlying asset unit on which the option is written. it can
be overriden only for commodity options. optional. if instrumentcode of listed
eti option is defined the value comes from the instrument reference data. the
default value is '1' for otc eti options.
:return: float
"""
return self._get_parameter("lotSize")
@lot_size.setter
def lot_size(self, value):
self._set_parameter("lotSize", value)
@property
def start_date(self):
"""
Start date of the option
:return: str
"""
return self._get_parameter("startDate")
@start_date.setter
def start_date(self, value):
self._set_datetime_parameter("startDate", value)
@property
def strike(self):
    """
    The set price at which the owner of the option can buy or sell the
    underlying asset. The value is expressed according to the market
    convention linked to the underlying asset. Optional in general, but
    mandatory for OTC ETI options and FX options. If the instrument code of
    a listed ETI option is defined, the value comes from the instrument
    reference data.

    :return: float
    """
    return self._get_parameter("strike")

@strike.setter
def strike(self, value):
    self._set_parameter("strike", value)
@property
def time_zone_offset(self):
    """
    The offset in minutes between UTC and the time of the exchange where
    the contract is traded. Optional. No default value applies.

    :return: int
    """
    return self._get_parameter("timeZoneOffset")

@time_zone_offset.setter
def time_zone_offset(self, value):
    # Fix: the original setter line was corrupted by extraction residue
    # appended after the call; restored to the clean single delegating call.
    self._set_parameter("timeZoneOffset", value)
from typing import Optional, Union
from .._base import BinaryDefinition
from .._enums import (
FxBinaryType,
)
class FxBinaryDefinition(BinaryDefinition):
    """
    Definition of an FX binary (digital) option.

    All values are stored via the parameter-dictionary accessors inherited
    from BinaryDefinition, under camelCase keys matching the backend payload.

    Parameters
    ----------
    binary_type : FxBinaryType or str, optional
        The type of a binary option.
    payout_amount : float, optional
        The payout amount of the option. The default value is '1,000,000'.
    payout_ccy : str, optional
        The trade currency, which is either a domestic or foreign currency.
        Either payout_ccy or settlement_type can be used at a time.
        payout_ccy="foreign currency" is equivalent to
        settlement_type="physical", and payout_ccy="domestic currency" is
        equivalent to settlement_type="cash". The value is expressed in
        ISO 4217 alphabetical format (e.g. 'USD').
    trigger : float, optional
        The trigger of the binary option.
    """

    def __init__(
        self,
        # Fix: default is None, so the annotation must be Optional.
        binary_type: Optional[Union[FxBinaryType, str]] = None,
        payout_amount: Optional[float] = None,
        payout_ccy: Optional[str] = None,
        trigger: Optional[float] = None,
    ) -> None:
        super().__init__()
        self.binary_type = binary_type
        self.payout_amount = payout_amount
        self.payout_ccy = payout_ccy
        self.trigger = trigger

    @property
    def binary_type(self):
        """
        The binary type of the digital option.

        :return: enum FxBinaryType
        """
        return self._get_enum_parameter(FxBinaryType, "binaryType")

    @binary_type.setter
    def binary_type(self, value):
        # Accepts either an FxBinaryType member or its string form; the enum
        # helper handles the conversion.
        self._set_enum_parameter(FxBinaryType, "binaryType", value)

    @property
    def payout_amount(self):
        """
        The payout amount of the binary option.

        :return: float
        """
        return self._get_parameter("payoutAmount")

    @payout_amount.setter
    def payout_amount(self, value):
        self._set_parameter("payoutAmount", value)

    @property
    def payout_ccy(self):
        """
        The payout currency of the binary option (ISO 4217 code).

        :return: str
        """
        return self._get_parameter("payoutCcy")

    @payout_ccy.setter
    def payout_ccy(self, value):
        self._set_parameter("payoutCcy", value)

    @property
    def trigger(self):
        """
        The trigger of the binary option.

        :return: float
        """
        return self._get_parameter("trigger")

    @trigger.setter
    def trigger(self, value):
        # Fix: the original setter line was corrupted by extraction residue
        # appended after the call; restored to the clean single delegating
        # call.
        self._set_parameter("trigger", value)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.