code
stringlengths
114
1.05M
path
stringlengths
3
312
quality_prob
float64
0.5
0.99
learning_prob
float64
0.2
1
filename
stringlengths
3
168
kind
stringclasses
1 value
from enum import unique
from typing import Optional, Union

from ..._tools import make_enum_arg_parser_by_members
from ..._base_enum import StrEnum


@unique
class CountryCode(StrEnum):
    """ISO 3166-1 alpha-3 country codes mapped to internal RCS country codes.

    Member names are the ISO alpha-3 codes; member values are the platform's
    RCS identifiers (e.g. 'G:7R' is the unique country code for Afghanistan).
    """

    AFG = "G:7R"
    ALB = "G:7G"
    DZA = "G:7S"
    ASM = "G:39"
    AND = "G:32"
    AGO = "G:82"
    AIA = "G:1H"
    ATA = "G:AY"
    ATG = "G:31"
    ARG = "G:60"
    ARM = "G:7I"
    ABW = "G:AD"
    AUS = "G:2H"
    AUT = "G:1F"
    AZE = "G:4R"
    BHS = "G:5E"
    BHR = "G:5Q"
    BGD = "G:9B"
    BRB = "G:8P"
    BLR = "G:8B"
    BEL = "G:9Y"
    BLZ = "G:6C"
    BEN = "G:48"
    BMU = "G:75"
    BTN = "G:8F"
    BOL = "G:3J"
    BIH = "G:2D"
    BWA = "G:61"
    BVT = "G:6F"
    BRA = "G:26"
    IOT = "G:9R"
    BRN = "G:80"
    BGR = "G:1W"
    BFA = "G:5A"
    BDI = "G:68"
    KHM = "G:9G"
    CMR = "G:4I"
    CAN = "G:8W"
    CPV = "G:AF"
    CYM = "G:4J"
    CAF = "G:5G"
    TCD = "G:A6"
    CHL = "G:4M"
    CHN = "G:B1"
    CXR = "G:5R"
    CCK = "G:67"
    COL = "G:2S"
    COM = "G:99"
    COD = "G:8A"
    COG = "G:5K"
    COK = "G:1L"
    CRI = "G:5H"
    HRV = "G:5X"
    CUB = "G:7X"
    CYP = "G:8T"
    CZE = "G:2E"
    DNK = "G:19"
    DJI = "G:6Z"
    DMA = "G:8U"
    DOM = "G:76"
    ECU = "G:8Q"
    EGY = "G:3G"
    SLV = "G:AB"
    GNQ = "G:5L"
    ERI = "G:6K"
    EST = "G:9D"
    SWZ = "G:7H"
    ETH = "G:6L"
    FLK = "G:3L"
    FRO = "G:24"
    FJI = "G:3Z"
    FIN = "G:90"
    FRA = "G:5M"
    GUF = "G:3B"
    PYF = "G:54"
    ATF = "G:9V"
    GAB = "G:69"
    GMB = "G:77"
    GEO = "G:9F"
    DEU = "G:3D"
    GHA = "G:5N"
    GIB = "G:79"
    GRC = "G:6A"
    GRL = "G:2R"
    GRD = "G:9A"
    GLP = "G:4Q"
    GUM = "G:2Y"
    GTM = "G:96"
    GGY = "G:34"
    GIN = "G:9L"
    GNB = "G:9Z"
    GUY = "G:44"
    HTI = "G:22"
    HMD = "G:4W"
    HND = "G:AG"
    HKG = "G:3H"
    HUN = "G:46"
    ISL = "G:6I"
    IND = "G:5B"
    IDN = "G:25"
    IRN = "G:56"
    IRQ = "G:8G"
    IRL = "G:6X"
    IMN = "G:35"
    ISR = "G:3S"
    ITA = "G:5J"
    JAM = "G:1G"
    JPN = "G:41"
    JEY = "G:33"
    JOR = "G:1Z"
    KAZ = "G:85"
    KEN = "G:70"
    KIR = "G:7P"
    PRK = "G:AE"
    KOR = "G:83"
    KWT = "G:7Q"
    KGZ = "G:8R"
    LAO = "G:8L"
    LVA = "G:4H"
    LBN = "G:64"
    LSO = "G:2M"
    LBR = "G:3U"
    LBY = "G:6W"
    LIE = "G:A9"
    LTU = "G:8I"
    LUX = "G:7M"
    MAC = "G:3I"
    MKD = "G:AI"
    MDG = "G:7Z"
    MWI = "G:6G"
    MYS = "G:8S"
    MDV = "G:6H"
    MLI = "G:3V"
    MLT = "G:4G"
    MHL = "G:36"
    MTQ = "G:8C"
    MRT = "G:2X"
    MUS = "G:9N"
    MEX = "G:2V"
    FSM = "G:9E"
    MDA = "G:6P"
    MCO = "G:88"
    MNG = "G:66"
    MNE = "G:3E"
    MSR = "G:1X"
    MAR = "G:8X"
    MOZ = "G:2B"
    MMR = "G:72"
    NAM = "G:6Q"
    NRU = "G:8J"
    NPL = "G:2J"
    NLD = "G:7K"
    NCL = "G:2L"
    NZL = "G:49"
    NIC = "G:AC"
    NER = "G:2U"
    NGA = "G:6B"
    NIU = "G:62"
    NFK = "G:7Y"
    NOR = "G:3N"
    OMN = "G:7B"
    PAK = "G:2P"
    PLW = "G:2N"
    PSE = "G:59"
    PAN = "G:4U"
    PNG = "G:2G"
    PRY = "G:89"
    PER = "G:3T"
    PHL = "G:7L"
    PCN = "G:15"
    POL = "G:5Y"
    PRT = "G:A3"
    PRI = "G:5U"
    QAT = "G:51"
    REU = "G:6N"
    ROU = "G:2Z"
    RUS = "G:38"
    RWA = "G:AA"
    SHN = "G:9S"
    KNA = "G:40"
    LCA = "G:3A"
    SPM = "G:4E"
    VCT = "G:3F"
    WSM = "G:2F"
    SMR = "G:78"
    STP = "G:5F"
    SAU = "G:92"
    SEN = "G:6E"
    SRB = "G:7F"
    SYC = "G:5C"
    SLE = "G:A5"
    SGP = "G:7D"
    SVK = "G:1C"
    SVN = "G:74"
    SLB = "G:1Y"
    SOM = "G:5D"
    ZAF = "G:2I"
    SGS = "G:1N"
    SSD = "G:C2"
    ESP = "G:55"
    LKA = "G:1J"
    SDN = "G:C1"
    SUR = "G:86"
    SJM = "G:1M"
    SWE = "G:6V"
    CHE = "G:30"
    SYR = "G:4P"
    TWN = "G:7U"
    TJK = "G:4N"
    TZA = "G:2T"
    THA = "G:3R"
    TGO = "G:91"
    TKL = "G:5P"
    TON = "G:8K"
    TTO = "G:9T"
    TUN = "G:2W"
    TUR = "G:8Z"
    TKM = "G:42"
    TCA = "G:9I"
    TUV = "G:2C"
    UGA = "G:47"
    UKR = "G:71"
    ARE = "G:A4"
    GBR = "G:7J"
    UMI = "G:9W"
    USA = "G:6J"
    URY = "G:4Y"
    UZB = "G:8M"
    VAT = "G:8Y"
    VUT = "G:9M"
    VEN = "G:2K"
    VNM = "G:5Z"
    WLF = "G:4L"
    ESH = "G:4F"
    YEM = "G:28"
    ZMB = "G:73"
    ZWE = "G:52"


# Accepted argument forms for a country code: raw string, enum member, or None.
OptCountryCode = Optional[Union[str, CountryCode]]

# Parser that converts user input (member name or value) into the enum's string value.
country_code_arg_parser = make_enum_arg_parser_by_members(CountryCode)
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/symbol_conversion/_country_code.py
0.667256
0.385693
_country_code.py
pypi
from enum import unique
from typing import Union, List, Optional, Iterable

from ..._tools import make_convert_to_enum, EnumArgsParser, make_parse_enum
from ..._base_enum import StrEnum


@unique
class AssetClass(StrEnum):
    """Asset class values used to build the 'filter' parameter of a request
    for the SymbolConversion content object."""

    COMMODITIES = "Commodities"
    EQUITY_OR_INDEX_OPTIONS = "EquityOrIndexOptions"
    BOND_AND_STIR_FUTURES_AND_OPTIONS = "BondAndSTIRFuturesAndOptions"
    WARRANTS = "Warrants"
    EQUITIES = "Equities"
    INDICES = "Indices"
    EQUITY_INDEX_FUTURES = "EquityIndexFutures"
    FUNDS = "Funds"
    CERTIFICATES = "Certificates"
    BONDS = "Bonds"
    # NOTE(review): member name misspells "REVERSE"; kept as-is because callers
    # reference AssetClass.RESERVE_CONVERTIBLE (the value is correct).
    RESERVE_CONVERTIBLE = "ReverseConvertible"
    MINI_FUTURE = "MiniFuture"
    FX_AND_MONEY = "FXAndMoney"


# Accepted argument forms: single value or list, as strings or enum members.
OptAssetClass = Optional[Union[str, List[str], AssetClass, List[AssetClass]]]

asset_class_enum_arg_parser = EnumArgsParser(
    parse=make_parse_enum(AssetClass),
    parse_to_enum=make_convert_to_enum(AssetClass),
)

# Asset classes expressed through the "SearchAllCategoryv3" filter clause.
search_all_category_by_asset_class = {
    AssetClass.COMMODITIES: "Commodities",
    AssetClass.EQUITY_OR_INDEX_OPTIONS: "Options",
    AssetClass.BOND_AND_STIR_FUTURES_AND_OPTIONS: "Exchange-Traded Rates",
    AssetClass.EQUITIES: "Equities",
    AssetClass.EQUITY_INDEX_FUTURES: "Futures",
    AssetClass.FUNDS: "Funds",
    AssetClass.BONDS: "Bond Pricing",
    AssetClass.FX_AND_MONEY: "FX & Money",
}

# Asset classes expressed through the "RCSAssetCategoryGenealogy" filter clause.
rcsasset_category_genealogy_by_asset_class = {
    AssetClass.WARRANTS: "A:AA",
    AssetClass.CERTIFICATES: "A:6N",
    AssetClass.INDICES: "I:17",
    AssetClass.RESERVE_CONVERTIBLE: "A:LE",
    AssetClass.MINI_FUTURE: "A:P6",
}


def _transform_to_string(values: Iterable, category: dict) -> str:
    """Render each mapped value as a single-quoted token, space-separated."""
    return " ".join(f"'{category[item]}'" for item in values)


def create_asset_class_request_strings(asset_class: list) -> tuple:
    """Build the two filter sub-strings for the given asset classes.

    Returns a ``(search_all_category, rcs_asset_category)`` pair; either
    element is the empty string when none of the given asset classes belong
    to the corresponding mapping.
    """
    in_search_all = [item for item in asset_class if item in search_all_category_by_asset_class]
    in_rcs = [item for item in asset_class if item in rcsasset_category_genealogy_by_asset_class]

    search_all_values = _transform_to_string(in_search_all, search_all_category_by_asset_class)
    rcs_values = _transform_to_string(in_rcs, rcsasset_category_genealogy_by_asset_class)

    search_all_category_string = ""
    if search_all_values:
        search_all_category_string = f"SearchAllCategoryv3 in ({search_all_values})"

    rcs_asset_category_string = ""
    if rcs_values:
        rcs_asset_category_string = f"RCSAssetCategoryGenealogy in ({rcs_values})"

    return search_all_category_string, rcs_asset_category_string
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/symbol_conversion/_asset_class.py
0.773644
0.19888
_asset_class.py
pypi
# Symbol-conversion Definition: maps user-friendly arguments onto the
# Search/Lookup request kwargs ("terms", "scope", "select", "boost", "filter").
# NOTE(review): in the `to_symbol_types` setter, `value is SYMBOL_TYPE_VALUES`
# is tested after `Copier.get_list(value)` replaced `value` with a copy, so the
# identity check presumably never matches — verify against Copier semantics.
# NOTE(review): the `asset_state`/`asset_class`/`extended_params` setters only
# write their kwargs when the value is truthy, so re-assigning None does not
# clear a previously set value (though `_update_filter()` still runs for the
# first two) — confirm this is intended.
from typing import TYPE_CHECKING, Union from ._asset_class import ( create_asset_class_request_strings, asset_class_enum_arg_parser, ) from ._country_code import country_code_arg_parser from ._symbol_type import SYMBOL_TYPE_VALUES, symbol_types_arg_parser from .._content_provider_layer import ContentProviderLayer from ..search import Views from ..._content_type import ContentType from ..._tools import create_repr, Copier from ._asset_state import asset_state_enum_arg_parser from ._asset_state import AssetState from ._asset_class import AssetClass if TYPE_CHECKING: from ._asset_state import OptAssetState from ._country_code import OptCountryCode from ._symbol_type import OptSymbolTypes from ._asset_class import OptAssetClass from ._country_code import CountryCode from ._symbol_type import SymbolTypes from ..._types import ExtendedParams, StrStrings DEFAULT_SCOPE = "_AllUnique" def _prepare_filter(asset_state: AssetState, asset_class: Union[list, AssetClass]) -> str: asset_state = asset_state or AssetState.ACTIVE if asset_state is AssetState.ACTIVE: ret_val = "AssetState eq 'AC'" else: ret_val = "(AssetState ne 'AC' and AssetState ne null)" if asset_class and not isinstance(asset_class, list): asset_class = [asset_class] if asset_class: search_all_category, rcs_asset_category = create_asset_class_request_strings(asset_class) if search_all_category and rcs_asset_category: ret_val = f"{ret_val} and ({search_all_category} or {rcs_asset_category})" else: ret_val = f"{ret_val} and ({search_all_category}{rcs_asset_category})" return ret_val class Definition(ContentProviderLayer): """ Creates a definition of information about the data that will be passed to the Search/Lookup API of the Refinitiv Data Platform. Parameters ---------- symbols: str or list of str Single instrument or list of instruments to convert. from_symbol_type: str or SymbolTypes, optional Instrument code to convert from. 
Possible values: 'CUSIP', 'ISIN', 'SEDOL', 'RIC', 'ticker', 'lipperID', 'IMO' Default: '_AllUnique' to_symbol_types: SymbolTypes, str or list of str or SymbolTypes, optional Instrument code to convert to. Possible values: 'CUSIP', 'ISIN', 'SEDOL', 'RIC', 'ticker', 'lipperID', 'IMO', 'OAPermID' Default: all symbol types are requested extended_params: dict, optional Specifies the parameters that will be merged with the request. preferred_country_code: str or CountryCode, optional Unique ISO 3166 code for country asset_class: str or AssetClass, optional AssetClass value to build filter parameter. asset_state: str or AssetState, optional AssetState value to build filter parameter. Examples -------- >>> from refinitiv.data.content import symbol_conversion >>> definition = symbol_conversion.Definition( ... symbols=["US5949181045", "US02079K1079"], ... from_symbol_type=symbol_conversion.SymbolTypes.ISIN, ... to_symbol_types=[ ... symbol_conversion.SymbolTypes.RIC, ... symbol_conversion.SymbolTypes.OA_PERM_ID ... ], ... preferred_country_code=symbol_conversion.CountryCode.USA, ... asset_class=[ ... symbol_conversion.AssetClass.COMMODITIES, ... symbol_conversion.AssetClass.EQUITIES, ... symbol_conversion.AssetClass.WARRANTS ... ], ... asset_state=symbol_conversion.AssetState.INACTIVE ... 
) >>> response = definition.get_data() """ def __init__( self, symbols: "StrStrings", from_symbol_type: Union[str, "SymbolTypes"] = DEFAULT_SCOPE, to_symbol_types: "OptSymbolTypes" = SYMBOL_TYPE_VALUES, preferred_country_code: "OptCountryCode" = None, asset_class: "OptAssetClass" = None, asset_state: "OptAssetState" = None, extended_params: "ExtendedParams" = None, ): super().__init__( content_type=ContentType.DISCOVERY_LOOKUP, view=Views.SEARCH_ALL, ) self.symbols = symbols self.from_symbol_type = from_symbol_type self.to_symbol_types = to_symbol_types self.preferred_country_code = preferred_country_code self.asset_class = asset_class self.asset_state = asset_state self.extended_params = extended_params @property def symbols(self) -> "StrStrings": return self._kwargs.get("terms") @symbols.setter def symbols(self, value: "StrStrings"): if value: value = Copier.get_list(value) self._kwargs["terms"] = ",".join(value) @property def from_symbol_type(self) -> Union[str, "SymbolTypes"]: return self._kwargs.get("scope") @from_symbol_type.setter def from_symbol_type(self, value: Union[str, "SymbolTypes"]): scope = value or DEFAULT_SCOPE if value and value != DEFAULT_SCOPE: scope = symbol_types_arg_parser.get_str(value) self._kwargs["scope"] = scope @property def to_symbol_types(self) -> "OptSymbolTypes": return self._kwargs.get("select") @to_symbol_types.setter def to_symbol_types(self, value: "OptSymbolTypes"): value = value and Copier.get_list(value) select = ["DocumentTitle"] if value is SYMBOL_TYPE_VALUES: select.extend(value) elif isinstance(value, list): select.extend(map(symbol_types_arg_parser.get_str, value)) elif value: select.append(symbol_types_arg_parser.get_str(value)) self._kwargs["select"] = ",".join(select) @property def preferred_country_code(self) -> "OptCountryCode": return self._kwargs.get("boost") @preferred_country_code.setter def preferred_country_code(self, value: "OptCountryCode"): if value: value = f"RCSExchangeCountry eq 
'{country_code_arg_parser.get_str(value)}'" self._kwargs["boost"] = value @property def _filter(self) -> str: return self._kwargs.get("filter") def _update_filter(self): self._kwargs["filter"] = _prepare_filter(self.asset_state, self.asset_class) @property def asset_state(self) -> "OptAssetState": return self._kwargs.get("asset_state") @asset_state.setter def asset_state(self, value: "OptAssetState"): if value: self._kwargs["asset_state"] = asset_state_enum_arg_parser.get_enum(value) self._update_filter() @property def asset_class(self) -> "OptAssetClass": return self._kwargs.get("asset_class") @asset_class.setter def asset_class(self, value: "OptAssetClass"): if value: value = Copier.get_list(value) self._kwargs["asset_class"] = asset_class_enum_arg_parser.get_enum(value) self._update_filter() @property def extended_params(self) -> "ExtendedParams": return self._kwargs.get("extended_params") @extended_params.setter def extended_params(self, value: "ExtendedParams"): if value: self._kwargs["extended_params"] = value def __repr__(self): return create_repr( self, middle_path="content.symbols_convention", content=f"{{symbols='{self.symbols}'}}", )
# NOTE(review): `__repr__` passes middle_path="content.symbols_convention",
# which does not match the package path (symbol_conversion) — presumably a
# typo in the repr label only; confirm before changing, as it is cosmetic.
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/symbol_conversion/_definition.py
0.86267
0.161651
_definition.py
pypi
from typing import TYPE_CHECKING

from .._content_data import Data
from .._content_provider_layer import ContentUsageLoggerMixin
from ..._content_type import ContentType
from ..._tools import custom_insts_historical_universe_parser, try_copy_to_list, custom_inst_datetime_adapter
from ...delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ..._types import OptDateTime, StrStrings, OptInt, ExtendedParams, OptStrStrs


class Definition(ContentUsageLoggerMixin[BaseResponse[Data]], DataProviderLayer[BaseResponse[Data]]):
    """Defines the parameters for requesting events from custom instruments.

    Parameters
    ----------
    universe : str or list
        The Id or Symbol of custom instrument to operate on
    start : str or date or datetime or timedelta, optional
        The start date and timestamp of the query in ISO8601 with UTC only
    end : str or date or datetime or timedelta, optional
        The end date and timestamp of the query in ISO8601 with UTC only
    count : int, optional
        The maximum number of data returned. Values range: 1 - 10000
    fields : list, optional
        The list of fields that are to be returned in the response
    extended_params : dict, optional
        If necessary other parameters

    Examples
    --------
    >>> from refinitiv.data.content.custom_instruments import events
    >>> definition_events = events.Definition("VOD.L")
    >>> response = definition_events.get_data()
    """

    # Identifier under which usage of this definition is logged.
    _USAGE_CLS_NAME = "CustomInstruments.EventsDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        start: "OptDateTime" = None,
        end: "OptDateTime" = None,
        count: "OptInt" = None,
        fields: "OptStrStrs" = None,
        extended_params: "ExtendedParams" = None,
    ):
        # Copy then normalize the universe into the parser's canonical list form.
        universe = custom_insts_historical_universe_parser.get_list(try_copy_to_list(universe))
        # Localize the datetime bounds the way the custom-instruments API expects.
        start = custom_inst_datetime_adapter.get_localize(start)
        end = custom_inst_datetime_adapter.get_localize(end)
        fields = try_copy_to_list(fields)
        super().__init__(
            data_type=ContentType.CUSTOM_INSTRUMENTS_EVENTS,
            universe=universe,
            start=start,
            end=end,
            count=count,
            fields=fields,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/custom_instruments/_events.py
0.89809
0.250844
_events.py
pypi
import abc

from ._custom_instruments_data_provider import (
    simple_custom_insts_body_params_config,
    custom_inst_type_arg_parser,
)
from ._enums import CustomInstrumentTypes
from ._instrument_prop_classes import Basket, UDC
from ..._content_type import ContentType
from ..._core.session import Session
from ...delivery._data._data_provider import DataProviderLayer
from ...delivery._data._endpoint_data import RequestMethod


class Instrument(abc.ABC):
    """Base wrapper around a custom-instrument payload dict.

    Exposes the payload's camelCase keys through snake_case properties and
    provides delete/save operations against the custom-instruments endpoint.
    """

    def __init__(self, data: dict, session: "Session" = None):
        """
        data : dict
            Raw instrument payload. Known keys:

            symbol : str
                Instrument symbol in the format "S)someSymbol.YOURUUID" or "someSymbol"
            formula : str, optional
                Formula consisting of rics (fields can be specified by comma).
            basket : dict, Basket, optional
                For weighted baskets / indices.
            udc : dict, UDC, optional
                Custom trading sessions.
            instrumentName : str, optional
                Human-readable name of the instrument. Maximum of 16 characters.
            exchangeName : str, optional
                4-letter code of the listing exchange.
            currency : str, optional
                3-letter code of the currency of the instrument, e.g. GBP.
            timeZone : str, optional
                Time Series uses an odd custom 3-letter value for time zone IDs,
                e.g. "LON" for London.
            holidays : List[Union[dict, Holiday]], optional
                List of custom calendar definitions.
            description : str, optional
                Free text field from the user to put any notes or text.
                Up to 1000 characters.
        session : Session, optional
            session=None - means default session would be used
        """
        self._data = data
        self._session = session

    # --- mutable payload accessors (each maps to a camelCase payload key) ---

    @property
    def symbol(self):
        return self._data.get("symbol")

    @symbol.setter
    def symbol(self, value):
        self._data["symbol"] = value

    @property
    def instrument_name(self):
        return self._data.get("instrumentName")

    @instrument_name.setter
    def instrument_name(self, value):
        self._data["instrumentName"] = value

    @property
    def exchange_name(self):
        return self._data.get("exchangeName")

    @exchange_name.setter
    def exchange_name(self, value):
        self._data["exchangeName"] = value

    @property
    def currency(self):
        return self._data.get("currency")

    @currency.setter
    def currency(self, value):
        self._data["currency"] = value

    @property
    def time_zone(self):
        return self._data.get("timeZone")

    @time_zone.setter
    def time_zone(self, value):
        self._data["timeZone"] = value

    @property
    def holidays(self):
        return self._data.get("holidays")

    @holidays.setter
    def holidays(self, value):
        self._data["holidays"] = value

    @property
    def description(self):
        return self._data.get("description")

    @description.setter
    def description(self, value):
        self._data["description"] = value

    # --- server-assigned, read-only fields ---

    @property
    def id(self):
        return self._data.get("id")

    @property
    def owner(self):
        return self._data.get("owner")

    @property
    def type_(self):
        return self._data.get("type")

    def delete(self):
        """Delete this instrument on the platform (keyed by its symbol).

        Examples
        --------
        >>> from refinitiv.data.content.custom_instruments.manage import get
        >>> instrument = get("MyInstrument")
        >>> instrument.delete()
        """
        provider = DataProviderLayer(
            data_type=ContentType.CUSTOMER_INSTRUMENTS_INSTRUMENTS
            if False
            else ContentType.CUSTOM_INSTRUMENTS_INSTRUMENTS,
            universe=self.symbol,
            method=RequestMethod.DELETE,
        )
        provider.get_data(self._session)

    def save(self):
        """Persist local changes via PUT and refresh the payload from the response.

        Examples:
        --------
        >>> from refinitiv.data.content.custom_instruments.manage import create_formula, Holiday
        >>> instrument = create_formula(
        ...     symbol="MyNewInstrument",
        ...     formula="EUR=*3",
        ...     holidays=[
        ...         Holiday(date="1991-08-23", name="Independence Day of Ukraine"),
        ...         {"date": "2022-12-18", "reason": "Hanukkah"},
        ...     ],
        ... )
        ... instrument.currency = "GBP"
        ... instrument.description = "short trading instrument"
        ... instrument.exchange_name = "9978"
        ... instrument.save()
        """
        provider = DataProviderLayer(
            data_type=ContentType.CUSTOM_INSTRUMENTS_INSTRUMENTS,
            universe=self.id,
            body_params_config=simple_custom_insts_body_params_config,
            method=RequestMethod.PUT,
            **self._data,
        )
        response = provider.get_data(self._session)
        self._data = response.data.raw


class CustomInstrumentFormula(Instrument):
    @property
    def formula(self):
        return self._data.get("formula")

    @formula.setter
    def formula(self, value):
        self._data["formula"] = value


class CustomInstrumentBasket(Instrument):
    @property
    def basket(self):
        # Rehydrate the stored dict into a Basket object on every access.
        return Basket._from_dict(self._data.get("basket"))

    @basket.setter
    def basket(self, value):
        self._data["basket"] = value


class CustomInstrumentUDC(Instrument):
    @property
    def udc(self) -> UDC:
        # Rehydrate the stored dict into a UDC object on every access.
        return UDC._from_dict(self._data.get("udc"))

    @udc.setter
    def udc(self, value):
        self._data["udc"] = value


# Dispatch table: payload "type" enum -> concrete Instrument subclass.
class_by_type = {
    CustomInstrumentTypes.Formula: CustomInstrumentFormula,
    CustomInstrumentTypes.Basket: CustomInstrumentBasket,
    CustomInstrumentTypes.UDC: CustomInstrumentUDC,
}


def create_instr_factory(data, session):
    """Instantiate the Instrument subclass matching ``data["type"]``.

    Raises AttributeError when the type is missing or has no registered class.
    """
    raw_type = data.get("type")
    if not raw_type:
        raise AttributeError("type parameter is not existed for object")
    enum_type = custom_inst_type_arg_parser.get_enum(raw_type)
    instr_class = class_by_type.get(enum_type)
    if not instr_class:
        raise AttributeError(f"There is no valid class for {enum_type}")
    return instr_class(data, session=session)
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/custom_instruments/_instrument_class.py
0.865096
0.252534
_instrument_class.py
pypi
import warnings from typing import Optional, List, Union, Callable, TYPE_CHECKING from ._enums import CustomInstrumentTypes from ._instrument_class import ( create_instr_factory, CustomInstrumentFormula, CustomInstrumentBasket, CustomInstrumentUDC, ) from ..._core.session import Session from ._instrument_prop_classes import Basket, UDC from ..._content_type import ContentType from ..ipa.dates_and_calendars.holidays._holidays_data_provider import Holiday from ...delivery._data._data_provider import DataProviderLayer, Response from ...delivery._data._endpoint_data import RequestMethod if TYPE_CHECKING: from ..._types import ExtendedParams def delete( universe: str, extended_params: "ExtendedParams" = None, session: "Session" = None, ) -> Response: """ universe : str Instrument symbol in the format "S)someSymbol.YOURUUID". extended_params : ExtendedParams, optional If necessary other parameters. session : Session, optional session=None. Means default session would be used Examples -------- >>> from refinitiv.data.content.custom_instruments.manage import delete >>> response = delete("MyInstrument") """ data_provider_layer = DataProviderLayer( data_type=ContentType.CUSTOM_INSTRUMENTS_INSTRUMENTS, universe=universe, extended_params=extended_params, method=RequestMethod.DELETE, ) return data_provider_layer.get_data(session) def get( universe: str, extended_params: "ExtendedParams" = None, session: "Session" = None ) -> Union[CustomInstrumentFormula, CustomInstrumentBasket, CustomInstrumentUDC]: """ universe : str Instrument symbol in the format "S)someSymbol.YOURUUID". extended_params : ExtendedParams, optional If necessary other parameters. 
session : Session, optional session=None - means default session would be used Examples -------- >>> from refinitiv.data.content.custom_instruments.manage import get >>> response = get("MyInstrument") """ data_provider_layer = DataProviderLayer( data_type=ContentType.CUSTOM_INSTRUMENTS_INSTRUMENTS, universe=universe, method=RequestMethod.GET, extended_params=extended_params, ) response = data_provider_layer.get_data(session=session) return create_instr_factory(response.data.raw, session=session) # deprecated method def create( symbol: str, formula: Optional[str] = None, basket: Optional[dict] = None, udc: Optional[dict] = None, instrument_name: Optional[str] = None, exchange_name: Optional[str] = None, currency: Optional[str] = None, time_zone: Optional[str] = None, holidays: Optional[List[Union[dict, Holiday]]] = None, description: Optional[str] = None, type_: Union[str, CustomInstrumentTypes] = CustomInstrumentTypes.Formula, extended_params: "ExtendedParams" = None, session: "Session" = None, on_response: Callable = None, ) -> Union[CustomInstrumentFormula, CustomInstrumentBasket, CustomInstrumentUDC]: """ With this method you can create a CustomInstrumentFormula, CustomInstrumentBasket, CustomInstrumentUDC objects. Parameters ---------- symbol: str Instrument symbol in the format "S)someSymbol.YOURUUID". formula : str Formula consisting of rics (fields can be specified by comma). basket : dict, Basket, optional Method of defining custom instruments, relying on a list of instruments and weights in order to build custom indices or other simple synthetic instruments udc : dict, UDC, optional User-Defined Continuations. Custom trading sessions, see sample format below. currency : str, optional 3-letter code of the currency of the instrument, e.g. GBP. instrument_name : str, optional Human-readable name of the instrument. Maximum of 16 characters. exchange_name : str, optional 4-letter code of the listing exchange. 
holidays : list[dict, Holiday], optional List of custom calendar definitions. time_zone : str, optional Time Series uses an odd custom 3-letter value for time zone IDs, e.g. "LON" for London. description : str, optional Free text field from the user to put any notes or text. Up to 1000 characters. type_ : str, CustomInstrumentTypes, optional Type of Synthetic Instrument - "formula", "basket","udc". Default value is "formula". extended_params : ExtendedParams, optional If necessary other parameters. session : Session, optional session=None - means default session would be used on_response : Callable, optional Callable object to process retrieved data Returns ------- CustomInstrumentFormula Examples -------- >>> from refinitiv.data.content.custom_instruments.manage import create_formula >>> import refinitiv.data.content.custom_instruments as ci >>> from refinitiv.data.content.ipa import dates_and_calendars >>> calendar_holiday = dates_and_calendars.holidays.Definition( ... start_date="2015-08-24", ... end_date="2018-09-24", ... calendars=["UKR"], ... holiday_outputs=["Date", "Names"], >>> ).get_data() ... >>> response = create_formula( ... symbol="MyNewInstrument", ... formula="EUR=*3", ... holidays=[ ... *calendar_holiday.data.holidays, ... ci.manage.Holiday(date="1991-08-24", name="Independence Day of Ukraine"), ... {"date": "2022-12-18", "reason": "Hanukkah"}, ... ], >>> ) """ warnings.warn( "'create()' is legacy interface. 
Will be changed to 'create_formula()', 'create_basket()', 'create_udc()'", ) data = _create( symbol, formula, basket, udc, instrument_name, exchange_name, currency, time_zone, holidays, description, type_, extended_params, session, on_response, ) return create_instr_factory(data, session=session) def _create( symbol: str, formula: Optional[str] = None, basket: Union[dict, Basket] = None, udc: Union[dict, UDC] = None, instrument_name: Optional[str] = None, exchange_name: Optional[str] = None, currency: Optional[str] = None, time_zone: Optional[str] = None, holidays: Optional[List[Union[dict, Holiday]]] = None, description: Optional[str] = None, type_: Union[str, CustomInstrumentTypes] = None, extended_params: "ExtendedParams" = None, session: "Session" = None, on_response: Callable = None, ) -> dict: data_provider_layer = DataProviderLayer( data_type=ContentType.CUSTOM_INSTRUMENTS_INSTRUMENTS, symbol=symbol, formula=formula, instrument_name=instrument_name, exchange_name=exchange_name, currency=currency, time_zone=time_zone, holidays=holidays, description=description, type_=type_, basket=basket, udc=udc, extended_params=extended_params, method=RequestMethod.POST, ) response = data_provider_layer.get_data(session, on_response) return response.data.raw def create_formula( symbol: str, formula: Optional[str] = None, currency: Optional[str] = None, instrument_name: Optional[str] = None, exchange_name: Optional[str] = None, holidays: Optional[List[Union[dict, Holiday]]] = None, time_zone: Optional[str] = None, description: Optional[str] = None, extended_params: "ExtendedParams" = None, session: "Session" = None, on_response: Callable = None, ) -> CustomInstrumentFormula: """ With this method you can create a CustomInstrumentFormula object. Parameters ---------- symbol: str Instrument symbol in the format "S)someSymbol.YOURUUID". formula : str Formula consisting of rics (fields can be specified by comma). 
currency : str, optional 3-letter code of the currency of the instrument, e.g. GBP. instrument_name : str, optional Human-readable name of the instrument. Maximum of 16 characters. exchange_name : str, optional 4-letter code of the listing exchange. holidays : list[dict, Holiday], optional List of custom calendar definitions. time_zone : str, optional Time Series uses an odd custom 3-letter value for time zone IDs, e.g. "LON" for London. description : str, optional Free text field from the user to put any notes or text. Up to 1000 characters. extended_params : ExtendedParams, optional If necessary other parameters. session : Session, optional session=None - means default session would be used on_response : Callable, optional Callable object to process retrieved data Returns ------- CustomInstrumentFormula Examples -------- >>> from refinitiv.data.content.custom_instruments.manage import create_formula >>> import refinitiv.data.content.custom_instruments as ci >>> response = create_formula( ... symbol="MyNewInstrument", ... formula="EUR=*3", ... holidays=[ ... ci.manage.Holiday(date="1991-08-23", name="Independence Day of Ukraine"), ... {"date": "2022-12-18", "reason": "Hanukkah"}, ... 
], >>> ) """ data = _create( symbol=symbol, type_=CustomInstrumentTypes.Formula, formula=formula, currency=currency, instrument_name=instrument_name, exchange_name=exchange_name, holidays=holidays, time_zone=time_zone, description=description, extended_params=extended_params, session=session, on_response=on_response, ) return CustomInstrumentFormula(data, session=session) def create_basket( symbol: str, basket: Union[dict, Basket], currency: str, instrument_name: Optional[str] = None, exchange_name: Optional[str] = None, holidays: Optional[List[Union[dict, Holiday]]] = None, time_zone: Optional[str] = None, description: Optional[str] = None, extended_params: "ExtendedParams" = None, session: "Session" = None, on_response: Callable = None, ) -> CustomInstrumentBasket: """ With this method you can create a CustomInstrumentBasket object. Parameters ---------- symbol: str Instrument symbol in the format "S)someSymbol.YOURUUID". basket : dict, Basket Method of defining custom instruments, relying on a list of instruments and weights in order to build custom indices or other simple synthetic instruments currency : str 3-letter code of the currency of the instrument, e.g. GBP. instrument_name : str, optional Human-readable name of the instrument. Maximum of 16 characters. exchange_name : str, optional 4-letter code of the listing exchange. holidays : list[dict, Holiday], optional List of custom calendar definitions. time_zone : str, optional Time Series uses an odd custom 3-letter value for time zone IDs, e.g. "LON" for London. description : str, optional Free text field from the user to put any notes or text. Up to 1000 characters. extended_params : ExtendedParams, optional If necessary other parameters. 
session : Session, optional session=None - means default session would be used on_response : Callable, optional Callable object to process retrieved data Returns ------- CustomInstrumentBasket Examples -------- >>> from refinitiv.data.content.custom_instruments.manage import create_basket >>> import refinitiv.data.content.custom_instruments as ci >>> import datetime ... response = create_basket( ... symbol="MyBasketInstrument", ... holidays=[ ... ci.manage.Holiday(date="1991-10-24", name="Labour Day"), ... ci.manage.Holiday(date=datetime.date(2021, 8, 24), name="Independence Day of Ukraine"), ... ci.manage.Holiday(date=datetime.timedelta(days=-30), name="Alaska Day"), ... {"date": "2022-04-23", "reason": "Shakespeare Day"}, ... ], ... basket=Basket( ... constituents=[ ... ci.manage.Constituent(ric="LSEG.L", weight=50), ... ci.manage.Constituent(ric="EPAM.N", weight=50), ... ], ... normalize_by_weight=True, ... ), ... currency="USD", ... ) """ data = _create( symbol=symbol, type_=CustomInstrumentTypes.Basket, basket=basket, currency=currency, instrument_name=instrument_name, exchange_name=exchange_name, holidays=holidays, time_zone=time_zone, description=description, extended_params=extended_params, session=session, on_response=on_response, ) return CustomInstrumentBasket(data, session=session) def create_udc( symbol: str, udc: Union[dict, UDC], currency: Optional[str] = None, instrument_name: Optional[str] = None, exchange_name: Optional[str] = None, holidays: Optional[List[Union[dict, Holiday]]] = None, time_zone: Optional[str] = None, description: Optional[str] = None, extended_params: "ExtendedParams" = None, session: "Session" = None, on_response: Callable = None, ) -> CustomInstrumentUDC: """ With this method you can create a CustomInstrumentUDC object. Parameters ---------- symbol: str Instrument symbol in the format "S)someSymbol.YOURUUID". udc : dict, UDC User-Defined Continuations. Custom trading sessions, see sample format below. 
currency : str, optional 3-letter code of the currency of the instrument, e.g. GBP. instrument_name : str, optional Human-readable name of the instrument. Maximum of 16 characters. exchange_name : str, optional 4-letter code of the listing exchange. holidays : list[dict, Holiday], optional List of custom calendar definitions. time_zone : str, optional Time Series uses an odd custom 3-letter value for time zone IDs, e.g. "LON" for London. description : str, optional Free text field from the user to put any notes or text. Up to 1000 characters. extended_params : ExtendedParams, optional If necessary other parameters. session : Session, optional session=None - means default session would be used on_response : Callable, optional Callable object to process retrieved data Returns ------- CustomInstrumentBasket Examples -------- >>> from refinitiv.data.content.custom_instruments.manage import create_udc >>> import refinitiv.data.content.custom_instruments as ci >>> import datetime ... >>> response_1 = create_udc( ... symbol="MyUDCInstrument_VB", ... instrument_name="Co Systems Inc", ... udc=ci.manage.UDC( ... root="CC", ... months=ci.manage.Months( ... number_of_years=3, ... include_all_months=True, ... start_month=1, ... ), ... rollover=ci.manage.VolumeBasedRollover( ... method=ci.VolumeBasedRolloverMethod.VOLUME, ... number_of_days=1, ... join_at_day=1, ... roll_occurs_within_months=4, ... roll_on_expiry=True, ... ), ... spread_adjustment=ci.manage.SpreadAdjustment( ... adjustment="arithmetic", ... method=ci.SpreadAdjustmentMethod.CLOSE_TO_CLOSE, ... backwards=True, ... ), ... ), ... >>> response_2 = create_udc( ... symbol="MyUDCInstrument_DB", ... instrument_name="ELBD Gbmx", ... udc=ci.manage.UDC( ... root="CC", ... months=ci.manage.Months( ... number_of_years=3, ... include_all_months=True, ... start_month=1, ... ), ... rollover=ci.manage.DayBasedRollover( ... method=ci.DayBasedRolloverMethod.DAYS_BEFORE_END_OF_MONTH, ... number_of_days=3, ... months_prior=1, ... 
), ... spread_adjustment=ci.manage.SpreadAdjustment( ... adjustment="arithmetic", ... method=ci.SpreadAdjustmentMethod.CLOSE_TO_CLOSE, ... backwards=True, ... ), ... ), ... >>> response_3 = create_udc( ... symbol="MyUDCInstrument_Manual", ... instrument_name="REPKO Sys", ... udc=ci.manage.UDC( ... root="CC", ... rollover=ci.manage.ManualRollover( ... ci.manage.ManualItem(month=7, year=2022, start_date="2022-02-01"), ... ci.manage.ManualItem(month=7, year=2021, start_date=datetime.date(2021, 3, 1)), ... ci.manage.ManualItem(month=3, year=2020, start_date=datetime.timedelta(days=-950)) ... ), ... spread_adjustment=ci.manage.SpreadAdjustment( ... adjustment="arithmetic", ... method=ci.SpreadAdjustmentMethod.CLOSE_TO_CLOSE, ... backwards=True, ... ), ... ), """ data = _create( symbol=symbol, type_=CustomInstrumentTypes.UDC, udc=udc, currency=currency, instrument_name=instrument_name, exchange_name=exchange_name, holidays=holidays, time_zone=time_zone, description=description, extended_params=extended_params, session=session, on_response=on_response, ) return CustomInstrumentUDC(data, session=session)
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/custom_instruments/_manage.py
0.925474
0.249973
_manage.py
pypi
import re from dataclasses import dataclass from json import JSONDecodeError from typing import TYPE_CHECKING, Tuple, Union, List, Callable import pandas as pd import requests from ._enums import CustomInstrumentTypes from ._instrument_prop_classes import Serializable from .._content_data_provider import ContentDataProvider from .._historical_content_validator import HistoricalContentValidator from .._historical_data_provider import ( EventsDataProvider, SummariesDataProvider, ) from .._historical_response_factory import HistoricalResponseFactory from .._intervals import DayIntervalType, get_day_interval_type, Intervals from ..historical_pricing._historical_pricing_request_factory import check_count from ..ipa.dates_and_calendars.holidays._holidays_data_provider import Holiday from ..._content_type import ContentType from ..._core.session import get_valid_session from ..._errors import RDError from ..._tools import ( get_response_reason, make_enum_arg_parser, custom_inst_datetime_adapter, ParamItem, ValueParamItem, EnumArgsParser, make_parse_enum, make_convert_to_enum, is_date_true, cached_property, extend_params, ) from ..._tools._dataframe import convert_df_columns_to_datetime, convert_dtypes from ...delivery._data._data_provider import ( RequestFactory, Parser, success_http_codes, ContentValidator, ContentTypeValidator, ValidatorContainer, ParsedData, ) from ...delivery._data._endpoint_data import RequestMethod if TYPE_CHECKING: import httpx content_type_by_day_interval_type = { DayIntervalType.INTER: ContentType.CUSTOM_INSTRUMENTS_INTERDAY_SUMMARIES, DayIntervalType.INTRA: ContentType.CUSTOM_INSTRUMENTS_INTRADAY_SUMMARIES, } # a20140be-3648-4892-9d1b-ce78ee8617fd is_instrument_id = re.compile(r"[a-z0-9]{8}(-[a-z0-9]{4}){3}-[a-z0-9]{12}") # S)INST.GESG1-0000 symbol_with_user_id = re.compile(r".*\.[A-Z0-9]+-[A-Z0-9]+") def provide_session(func): def _func(value, session, *args, **kwargs): return func(value, session) return _func def 
get_content_type_by_interval(interval) -> ContentType: day_interval_type = get_day_interval_type(interval) return content_type_by_day_interval_type.get(day_interval_type) # -------------------------------------------------------------------------------------- # Response factory # -------------------------------------------------------------------------------------- def custom_instruments_build_df(content_data: dict, **kwargs) -> pd.DataFrame: if isinstance(content_data, dict): content_data = [content_data] dataframe = pd.DataFrame(content_data) dataframe = convert_dtypes(dataframe) return dataframe def custom_instruments_intervals_build_df(content_data: dict, **kwargs) -> pd.DataFrame: data = content_data.get("data") headers = content_data.get("headers", []) columns = [header.get("name") for header in headers] dataframe = pd.DataFrame(data, columns=columns) convert_df_columns_to_datetime(dataframe, entry="DATE", utc=True, delete_tz=True) dataframe.fillna(pd.NA, inplace=True) return dataframe # -------------------------------------------------------------------------------------- # Request factory # -------------------------------------------------------------------------------------- def get_user_id(session=None) -> str: session = get_valid_session(session) return session._user_uuid def convert_to_symbol(symbol, session=None, uuid=""): # "MyNewInstrument" retval = symbol if not retval.startswith("S)"): retval = f"S){retval}" # "S)MyNewInstrument" if not symbol_with_user_id.match(retval): if not uuid: uuid = get_user_id(session) retval = f"{retval}.{uuid}" # "S)MyNewInstrument.GE-1234" return retval def convert_to_holidays(array: List[Union[dict, Holiday]]) -> List: converted_holidays = [] for holiday in array: if isinstance(holiday, dict): if "date" in holiday and "reason" in holiday: converted_holidays.append(holiday) else: raise ValueError("Holiday object should have 'date' and 'reason'") elif isinstance(holiday, Holiday): converted_holidays.append({"date": 
holiday.date, "reason": holiday.name}) else: raise TypeError("holidays parameter can take only dict or Holiday objects") return converted_holidays def convert_to_dict(obj: Union[dict, dataclass]) -> dict: if isinstance(obj, Serializable): return obj._to_dict() elif isinstance(obj, dict): return obj raise TypeError(f"Parameter can take only dict or UDC/Basket object") def get_valid_symbol(symbol, uuid): return convert_to_symbol(symbol, uuid=uuid) def get_valid_symbol_request(symbol, session): return convert_to_symbol(symbol, session) class BaseRequestFactory(RequestFactory): def get_url(self, *args, **kwargs): url = super().get_url(*args, **kwargs) if self.get_request_method(**kwargs) != RequestMethod.POST: url += "/{universe}" return url def get_path_parameters(self, session, *, universe=None, **kwargs): if self.get_request_method(**kwargs) == RequestMethod.POST: return {} if universe is None: raise RDError(-1, "universe can't be None") if not is_instrument_id.match(universe): universe = get_valid_symbol_request(universe, session) return {"universe": universe} def extend_query_parameters(self, query_parameters, extended_params=None): return extend_params(query_parameters, extended_params) def extend_body_parameters(self, body_parameters, **kwargs): return body_parameters custom_inst_type_arg_parser = EnumArgsParser( parse=make_parse_enum(CustomInstrumentTypes), parse_to_enum=make_convert_to_enum(CustomInstrumentTypes), ) class CustomInstsRequestFactory(BaseRequestFactory): @property def body_params_config(self): return custom_insts_body_params def get_body_parameters(self, session, *args, **kwargs): body_parameters = {} if self.get_request_method(**kwargs) not in { RequestMethod.POST, RequestMethod.PUT, }: return body_parameters return super().get_body_parameters(session, *args, **kwargs) def extend_body_parameters(self, body_parameters, extended_params=None, **kwargs): if extended_params: result = dict(body_parameters) result.update(extended_params) return result 
return body_parameters # -------------------------------------------------------------------------------------- # Raw data parser # -------------------------------------------------------------------------------------- class CustomInstsParser(Parser): def parse_raw_response(self, raw_response: "httpx.Response") -> Tuple[bool, ParsedData]: is_success = False if raw_response is None: return is_success, ParsedData({}, {}) is_success = raw_response.status_code in success_http_codes + [requests.codes.no_content] if is_success: parsed_data = self.process_successful_response(raw_response) else: parsed_data = self.process_failed_response(raw_response) return is_success, parsed_data def process_failed_response(self, raw_response: "httpx.Response") -> ParsedData: status = { "http_status_code": raw_response.status_code, "http_reason": get_response_reason(raw_response), } try: content_data = raw_response.json() if isinstance(content_data, list): content_data = content_data[0] content_error = content_data.get("error") if content_error: status["error"] = content_error error_code = content_error.get("code") if isinstance(error_code, str) and not error_code.isdigit(): error_code = raw_response.status_code error_message = content_error.get("message") errors = content_error.get("errors", {}) errors = [error.get("reason") for error in errors if error] if errors: errors = "\n".join(errors) error_message = f"{error_message}: {errors}" elif "state" in content_data: state = content_data.get("state", {}) error_code = state.get("code") data = content_data.get("data", []) reasons = [_data.get("reason", "") for _data in data] reason = "\n".join(reasons) error_message = f"{state.get('message')}: {reason}" else: error_code = raw_response.status_code error_message = raw_response.text except (TypeError, JSONDecodeError): error_code = raw_response.status_code error_message = raw_response.text if error_code == 403: if not error_message.endswith("."): error_message += ". 
" error_message += "Contact Refinitiv to check your permissions." return ParsedData(status, raw_response, error_codes=error_code, error_messages=error_message) # -------------------------------------------------------------------------------------- # Content data validator # -------------------------------------------------------------------------------------- class CustomInstsContentValidator(ContentValidator): @classmethod def content_data_is_not_none(cls, data: ParsedData) -> bool: if data.content_data is None and data.status.get("http_status_code") != 204: data.error_codes = 1 data.error_messages = "Content data is None" return False return True @cached_property def validators(self) -> List[Callable[["ParsedData"], bool]]: return [self.content_data_is_not_none] # -------------------------------------------------------------------------------------- # Request factory # -------------------------------------------------------------------------------------- interval_arg_parser = make_enum_arg_parser(Intervals, can_be_lower=True) class CustomInstsSearchRequestFactory(RequestFactory): def get_query_parameters(self, *args, **kwargs): access = kwargs.get("access") return [ ("access", access), ] def extend_query_parameters(self, query_parameters, extended_params=None): return extend_params(query_parameters, extended_params) def extend_body_parameters(self, body_parameters, **kwargs): return body_parameters custom_insts_events_query_params = [ ValueParamItem("start", function=custom_inst_datetime_adapter.get_str, is_true=is_date_true), ValueParamItem("end", function=custom_inst_datetime_adapter.get_str, is_true=is_date_true), ValueParamItem("count", function=check_count), ] custom_insts_summaries_query_params = [ ValueParamItem("interval", function=interval_arg_parser.get_str), ValueParamItem("start", function=custom_inst_datetime_adapter.get_str, is_true=is_date_true), ValueParamItem("end", function=custom_inst_datetime_adapter.get_str, is_true=is_date_true), 
ValueParamItem("count", function=check_count), ] custom_insts_body_params = [ ParamItem("exchange_name", "exchangeName"), ParamItem("instrument_name", "instrumentName"), ParamItem("time_zone", "timeZone"), ValueParamItem("type_", "type", custom_inst_type_arg_parser.parse), ParamItem("symbol", function=provide_session(convert_to_symbol)), ParamItem("currency"), ParamItem("description"), ParamItem("formula"), ValueParamItem("holidays", function=convert_to_holidays), ValueParamItem("basket", function=convert_to_dict), ValueParamItem("udc", function=convert_to_dict), ] simple_custom_insts_body_params_config = [ ParamItem("exchangeName"), ParamItem("instrumentName"), ParamItem("timeZone"), ParamItem("type"), ParamItem("symbol", function=provide_session(convert_to_symbol)), ParamItem("currency"), ParamItem("description"), ParamItem("formula"), ValueParamItem("udc", function=convert_to_dict), ValueParamItem("basket", function=convert_to_dict), ValueParamItem("holidays", function=convert_to_holidays), ] class CustomInstsEventsRequestFactory(BaseRequestFactory): @property def query_params_config(self): return custom_insts_events_query_params class CustomInstsSummariesRequestFactory(BaseRequestFactory): @property def query_params_config(self): return custom_insts_summaries_query_params # -------------------------------------------------------------------------------------- # Data provider # -------------------------------------------------------------------------------------- custom_instrument_data_provider = ContentDataProvider( request=CustomInstsRequestFactory(), parser=CustomInstsParser(), validator=ValidatorContainer( content_validator=CustomInstsContentValidator(), content_type_validator=ContentTypeValidator({"application/json", ""}), ), ) custom_instrument_search_data_provider = ContentDataProvider( request=CustomInstsSearchRequestFactory(), parser=CustomInstsParser(), validator=ValidatorContainer( content_validator=CustomInstsContentValidator(), 
content_type_validator=ContentTypeValidator({"application/json", ""}), ), ) custom_instruments_events_data_provider = EventsDataProvider( request=CustomInstsEventsRequestFactory(), parser=CustomInstsParser(), response=HistoricalResponseFactory(), validator=HistoricalContentValidator(), ) custom_instruments_intraday_summaries_data_provider = SummariesDataProvider( request=CustomInstsSummariesRequestFactory(), parser=CustomInstsParser(), response=HistoricalResponseFactory(), validator=HistoricalContentValidator(), ) custom_instruments_interday_summaries_data_provider = SummariesDataProvider( request=CustomInstsSummariesRequestFactory(), parser=CustomInstsParser(), response=HistoricalResponseFactory(), validator=HistoricalContentValidator(), )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/custom_instruments/_custom_instruments_data_provider.py
0.658088
0.172485
_custom_instruments_data_provider.py
pypi
from typing import Union, TYPE_CHECKING from ._custom_instruments_data_provider import get_content_type_by_interval from .._content_data import Data from .._content_provider_layer import ContentUsageLoggerMixin from .._intervals import DayIntervalType, get_day_interval_type, Intervals from ..._tools import ( validate_types, custom_insts_historical_universe_parser, try_copy_to_list, custom_inst_datetime_adapter, ) from ...delivery._data._data_provider import DataProviderLayer, BaseResponse if TYPE_CHECKING: from ..._types import StrStrings, OptDateTime, OptInt, ExtendedParams class Definition(ContentUsageLoggerMixin[BaseResponse[Data]], DataProviderLayer[BaseResponse[Data]]): """ Summary line of this class that defines parameters for requesting summaries from custom instruments Parameters ---------- universe : str or list The Id or Symbol of custom instrument to operate on interval : str or Intervals, optional The consolidation interval in ISO8601 start : str or date or datetime or timedelta, optional The start date and timestamp of the query in ISO8601 with UTC only end : str or date or datetime or timedelta, optional The end date and timestamp of the query in ISO8601 with UTC only count : int, optional The maximum number of data returned. 
Values range: 1 - 10000 fields : list, optional The list of fields that are to be returned in the response extended_params : dict, optional If necessary other parameters Examples -------- >>> from refinitiv.data.content.custom_instruments import summaries >>> definition_summaries = summaries.Definition("VOD.L") >>> response = definition_summaries.get_data() """ _USAGE_CLS_NAME = "CustomInstruments.SummariesDefinition" def __init__( self, universe: "StrStrings", interval: Union[str, Intervals] = None, start: "OptDateTime" = None, end: "OptDateTime" = None, count: "OptInt" = None, fields: "StrStrings" = None, extended_params: "ExtendedParams" = None, ) -> None: start = custom_inst_datetime_adapter.get_localize(start) end = custom_inst_datetime_adapter.get_localize(end) day_interval_type = get_day_interval_type(interval or DayIntervalType.INTER) content_type = get_content_type_by_interval(day_interval_type) validate_types(count, [int, type(None)], "count") fields = try_copy_to_list(fields) universe = try_copy_to_list(universe) universe = custom_insts_historical_universe_parser.get_list(universe) super().__init__( data_type=content_type, universe=universe, interval=interval, start=start, end=end, count=count, fields=fields, extended_params=extended_params, )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/custom_instruments/_summaries.py
0.895494
0.283733
_summaries.py
pypi
from typing import TYPE_CHECKING

from ._custom_instruments_data_provider import get_user_id
from ._stream_facade import Stream, _init_universe
from ..._tools import universe_arg_parser, fields_arg_parser, try_copy_to_list

if TYPE_CHECKING:
    from ..._types import OptStr, ExtendedParams, StrStrings, OptStrStrs


class Definition:
    """Defines parameters for requesting events from a custom instrument.

    Parameters
    ----------
    universe : str
        The Id or Symbol of custom instrument to operate on.
        Use only for get_instrument().
    fields : str or list of str, optional
        Specifies the specific fields to be delivered when messages arrive
    api : str, optional
        Specifies the data source. It can be updated/added using config file
    extended_params : dict, optional
        If necessary other parameters

    Examples
    --------
    >>> from refinitiv.data.content.custom_instruments import Definition
    >>> definition = Definition(universe="S)MyNewInstrument")
    >>> stream = definition.get_stream()
    >>> stream.open()
    """

    def __init__(
        self,
        universe: "StrStrings",
        fields: "OptStrStrs" = None,
        api: "OptStr" = None,
        extended_params: "ExtendedParams" = None,
    ):
        extended_params = extended_params or {}
        # Values passed through extended_params take precedence over the
        # corresponding constructor arguments.
        universe = extended_params.pop("universe", universe)
        fields = extended_params.pop("fields", fields)
        self._fields = fields_arg_parser.get_unique(try_copy_to_list(fields) or [])
        self._universe = universe_arg_parser.get_list(try_copy_to_list(universe))
        self._api = api
        self._extended_params = extended_params

    def get_stream(self, session=None) -> Stream:
        """Build a Stream for this definition, resolving the universe for the session's user."""
        uuid = get_user_id(session)
        return Stream(
            universe=_init_universe(self._universe, session, uuid),
            session=session,
            fields=self._fields,
            api=self._api,
            extended_params=self._extended_params,
            uuid=uuid,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/custom_instruments/_definition.py
0.86267
0.200245
_definition.py
pypi
from typing import Union, List

from ._feed_name import Feed
from ..._tools import filling_search_datetime_adapter
from ..._types import OptDateTime

# GraphQL filter fragments; "%s" is substituted with the user-supplied value.
FORM_TYPE_TEMPLATE = '{FilingDocument: {DocumentSummary: {FormType: {EQ: "%s"}}}}'
FEED_TEMPLATE = '{FilingDocument: {DocumentSummary: {FeedName: {EQ: "%s"}}}}'
ORG_ID_TEMPLATE = '{FilingDocument: {Identifiers: {OrganizationId: {EQ: "%s"}}}}'
KEYWORDS_TEMPLATE = ""

# Query skeleton; "$filter", "$keywords", "$sections", "$limit" and "$sortOrder"
# are replaced textually in get_query().
QUERY_TEMPLATE = (
    "{"
    "  FinancialFiling($filter $keywords"
    "    sort: {FilingDocument: {DocumentSummary: {FilingDate: $sortOrder}}},"
    "    limit: $limit ) {"
    "    _metadata {"
    "      totalCount"
    "      cursor"
    "    }"
    "    FilingOrganization {"
    "      Names {"
    "        Name{"
    "          OrganizationName("
    '            filter: {AND: [{LanguageId_v2: {EQ: "505062"}}, {NameTypeCode: {EQ: "LNG"}}]}'
    "          ) {"
    "            Name"
    "          }"
    "        }"
    "      }"
    "    }"
    "    FilingDocument {"
    "      Identifiers {"
    "        OrganizationId"
    "        Dcn"
    "      }"
    "      DocId"
    "      FinancialFilingId"
    "      $sections"
    "      DocumentSummary {"
    "        DocumentTitle"
    "        FeedName"
    "        FormType"
    "        HighLevelCategory"
    "        MidLevelCategory"
    "        FilingDate"
    "        SecAccessionNumber"
    "        SizeInBytes"
    "      }"
    "      FilesMetaData {"
    "        FileName"
    "        MimeType"
    "      }"
    "    }"
    "  }"
)


def get_dates_expression(start_date: "OptDateTime" = None, end_date: "OptDateTime" = None) -> str:
    """Return a FilingDate GraphQL filter for the given date range.

    Uses BETWN when both bounds are given, GTE/LTE when only one is given.
    """
    dates = ""
    if start_date and end_date:
        dates = f'BETWN: {{FROM: "{filling_search_datetime_adapter.get_str(start_date)}", TO: "{filling_search_datetime_adapter.get_str(end_date)}"}}'
    elif start_date:
        dates = f'GTE: "{filling_search_datetime_adapter.get_str(start_date)}"'
    elif end_date:
        dates = f'LTE: "{filling_search_datetime_adapter.get_str(end_date)}"'
    filling_date = f"{{FilingDocument: {{DocumentSummary: {{FilingDate: {{{dates}}}}}}}}}"
    return filling_date


def _get_filter_expression(
    # NOTE: annotations fixed — feed (not form_type) may be a Feed enum member,
    # matching get_query() and the Feed import above.
    form_type: str = None,
    feed: Union[Feed, str] = None,
    org_id: str = None,
    start_date: "OptDateTime" = None,
    end_date: "OptDateTime" = None,
) -> str:
    """Combine the individual filter fragments into one 'filter: ...,' clause.

    Returns an empty string when no filter argument is given; multiple filters
    are AND-ed together.
    """
    filters = []
    if form_type:
        filters.append(FORM_TYPE_TEMPLATE % form_type)
    if feed:
        filters.append(FEED_TEMPLATE % feed)
    if org_id:
        filters.append(ORG_ID_TEMPLATE % org_id)
    if start_date or end_date:
        filters.append(get_dates_expression(start_date, end_date))
    if not filters:
        return ""
    filter_expression = ", ".join(filters)
    if len(filters) > 1:
        filter_expression = f"{{AND: [{filter_expression}]}}"
    # Trailing comma separates this clause from the $keywords placeholder.
    return f"filter: {filter_expression},"


def _get_keywords_expression(text: str) -> str:
    """Return a full-text 'keywords: ...' clause, or '' when no text is given."""
    if text:
        return f'keywords: {{searchstring: "FinancialFiling.FilingDocument.DocumentText:{text}"}},'
    return ""


def _get_sections_expression(sections: List[str]) -> str:
    """Return a 'Sections{...}' selection for the requested document sections."""
    if sections:
        contents = " ".join([f"{section} {{Text}}" for section in sections])
        return f"Sections{{ {contents} }}"
    return ""


def get_query(
    form_type: str = None,
    feed: Union[Feed, str] = None,
    org_id: str = None,
    start_date: "OptDateTime" = None,
    end_date: "OptDateTime" = None,
    text: str = None,
    sections: List[str] = None,
    limit: int = None,
    sort_order: str = None,
) -> str:
    """Build the FinancialFiling GraphQL query string.

    Defaults: limit=10, sort_order="DESC" (most recent filings first).
    """
    if limit is None:
        limit = 10
    if sort_order is None:
        sort_order = "DESC"
    query = QUERY_TEMPLATE.replace("$limit", str(limit)).replace("$sortOrder", sort_order)
    filter_expression = _get_filter_expression(form_type, feed, org_id, start_date, end_date)
    query = query.replace("$filter", filter_expression)
    keywords_expression = _get_keywords_expression(text)
    query = query.replace("$keywords", keywords_expression)
    sections_expression = _get_sections_expression(sections)
    query = query.replace("$sections", sections_expression)
    return query
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/filings/_filing_query.py
0.707809
0.270705
_filing_query.py
pypi
from typing import TYPE_CHECKING

from ._retrieval_data_provider import FilingsData
from ..._content_type import ContentType
from ..._tools import create_repr
from ...delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ..._types import OptStr


class Definition(DataProviderLayer[BaseResponse[FilingsData]]):
    """Describes an identifier used to retrieve a filings document via a signed URL.

    Exactly one of filename, dcn, doc_id or filing_id must be provided.

    Parameters
    ----------
    filename : str
        Filename is the given name of the document which also includes its file type
    dcn : str
        Document Control Number is an external identifier and an enclosed
        film-number specific to Edgar documents
    doc_id : str
        Document ID is a Refinitiv internal identifier assigned to financial
        filings documents
    filing_id : str
        Financial Filing ID is a Refinitiv internal permanent identifier
        assigned to each filing document

    Examples
    --------
    >>> from refinitiv.data.content import filings
    >>> definition = filings.retrieval.Definition(filename="ecpfilings_34359955599_pdf")
    >>> response = definition.get_data()
    >>> response.data.files[0].download(path="C:\\Downloads\\download_test")

    Download all files at once

    >>> response.data.files.download(path="C:\\Downloads\\download_test")
    """

    def __init__(
        self,
        filename: "OptStr" = None,
        dcn: "OptStr" = None,
        doc_id: "OptStr" = None,
        filing_id: "OptStr" = None,
    ):
        # Enforce that exactly one identifier was supplied.
        provided = [param for param in (filename, dcn, doc_id, filing_id) if param is not None]
        if not provided:
            raise ValueError("One of filename, dcn, doc_id or filing_id, is required in a Definition.")
        if len(provided) > 1:
            raise ValueError("Only one of filename, dcn, doc_id or filing_id, can be used in a Definition")
        self.filename = filename
        self.dcn = dcn
        self.doc_id = doc_id
        self.filing_id = filing_id
        super().__init__(
            ContentType.FILINGS_RETRIEVAL,
            filename=self.filename,
            dcn=self.dcn,
            doc_id=self.doc_id,
            filing_id=self.filing_id,
        )

    def __repr__(self):
        return create_repr(
            self,
            middle_path="retrieval",
            content=(
                f"{{filename='{self.filename}', dcn='{self.dcn}', "
                f"doc_id='{self.doc_id}', filing_id='{self.filing_id}'}}"
            ),
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/filings/_retrieval_definition.py
0.854945
0.207395
_retrieval_definition.py
pypi
from typing import List

from ._df_builder import concat_ownership_dfs
from ._enums import StatTypes, Frequency, SortOrder
from .._content_data_factory import ContentDataFactory
from .._content_data_provider import ContentDataProvider
from .._error_parser import ErrorParser
from .._universe_content_validator import UniverseContentValidator
from ..._tools import ArgsParser, extend_params, ValueParamItem, ParamItem
from ..._tools import universe_arg_parser, make_enum_arg_parser
from ..._tools._datetime import ownership_datetime_adapter
from ...delivery._data._data_provider import RequestFactory, ValidatorContainer
from ...delivery._data._response import create_response, BaseResponse

# Maximum page size accepted by the ownership endpoints; larger user-requested
# limits are split into multiple paged requests of at most this size.
MAX_LIMIT = 100


def parse_str(param):
    """Return *param* unchanged if it is a str, otherwise raise ValueError."""
    if isinstance(param, str):
        return param
    raise ValueError(f"Invalid type, expected str: {type(param)} is given")


# Parser for endpoints that accept only a single (string) universe value.
universe_ownership_arg_parser = ArgsParser(parse_str)


def get_unique_universe(universe):
    """Deduplicate a universe list (preserving order) and join it into a comma-separated string."""
    if isinstance(universe, list):
        # dict.fromkeys keeps first-seen order while removing duplicates
        universe = list(dict.fromkeys(universe))
    return universe_arg_parser.get_str(universe, delim=",")


class OwnershipRequestFactory(RequestFactory):
    # Maps Python-level keyword arguments to query-string parameters,
    # applying the given conversion function to each value.
    query_params_config = [
        ValueParamItem(
            "universe",
            function=get_unique_universe,
            is_true=lambda universe: universe is not None,
        ),
        ValueParamItem("stat_type", "statType", function=make_enum_arg_parser(StatTypes).get_str),
        ParamItem("offset"),
        ParamItem("limit"),
        ValueParamItem("sort_order", "sortOrder", make_enum_arg_parser(SortOrder).get_str),
        ValueParamItem("frequency", function=make_enum_arg_parser(Frequency).get_str),
        ValueParamItem("start", function=ownership_datetime_adapter.get_str),
        ValueParamItem("end", function=ownership_datetime_adapter.get_str),
        ParamItem("count"),
    ]

    def extend_query_parameters(self, query_parameters, extended_params=None):
        """Merge user-supplied extended_params into the built query parameters."""
        return extend_params(query_parameters, extended_params)


class OwnershipDataFactoryMultiResponse(ContentDataFactory):
    def get_dfbuilder(self, **__):
        # Dataframes from several paged responses are concatenated into one.
        return concat_ownership_dfs


class OwnershipDataProvider(ContentDataProvider):
    data_factory_multi_response = OwnershipDataFactoryMultiResponse()

    def create_response(self, responses: List[BaseResponse], kwargs: dict) -> BaseResponse:
        """Collapse a list of paged responses into a single response object."""
        if len(responses) == 1:
            return responses[0]
        kwargs["responses"] = responses
        return create_response(responses, self.data_factory_multi_response, kwargs)

    def get_data(self, *args, **kwargs):
        """Fetch data, transparently paging when limit exceeds MAX_LIMIT.

        Requests are issued in pages of MAX_LIMIT rows; the final page is
        truncated to the remaining count, then all pages are merged.
        """
        limit = kwargs.get("limit")
        if limit is None:
            response = super().get_data(*args, **kwargs)
        else:
            responses = []
            for offset in range(0, limit, MAX_LIMIT):
                # Last page gets only the remainder (limit - offset).
                kwargs["limit"] = MAX_LIMIT if offset < limit - MAX_LIMIT else limit - offset
                # offset=0 is sent as None (parameter omitted from the request).
                kwargs["offset"] = offset if offset else None
                response = super().get_data(*args, **kwargs)
                responses.append(response)
            response = self.create_response(responses, kwargs)
        return response

    async def get_data_async(self, *args, **kwargs):
        """Async counterpart of get_data with identical paging semantics."""
        limit = kwargs.get("limit")
        if limit is None:
            response = await super().get_data_async(*args, **kwargs)
        else:
            responses = []
            for offset in range(0, limit, MAX_LIMIT):
                kwargs["limit"] = MAX_LIMIT if offset < limit - MAX_LIMIT else limit - offset
                kwargs["offset"] = offset if offset else None
                response = await super().get_data_async(*args, **kwargs)
                responses.append(response)
            response = self.create_response(responses, kwargs)
        return response


ownership_data_provider = OwnershipDataProvider(
    request=OwnershipRequestFactory(),
    validator=ValidatorContainer(content_validator=UniverseContentValidator()),
    parser=ErrorParser(),
)
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ownership/_ownership_data_provider.py
0.807347
0.1831
_ownership_data_provider.py
pypi
from typing import Union, TYPE_CHECKING

from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from .._enums import SortOrder
    from ...._types import ExtendedParams, StrStrings


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """
    This class describe parameters to retrieve the latest 5 buy or sell
    activities for the requested company.

    Parameters
    ----------
    universe: str, list of str
        The Universe parameter allows the user to define the companies for
        which the content is returned.
    sort_order: str, SortOrder
        The sortOrder parameter specifies ascending (asc) or descending (desc)
        Sort Order.
    use_field_names_in_headers: bool, optional
        Return field name as column headers for data instead of title
    extended_params : ExtendedParams, optional
        If necessary other parameters.

    Examples
    --------
    >>> from refinitiv.data.content import ownership
    >>> definition = ownership.fund.recent_activity.Definition("TRI.N", ownership.SortOrder.ASCENDING)
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Ownership.Fund.RecentActivityDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        sort_order: Union[str, "SortOrder"],
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        validate_bool_value(use_field_names_in_headers)
        # Copy to avoid mutating a caller-owned list.
        universe = try_copy_to_list(universe)
        super().__init__(
            ContentType.OWNERSHIP_FUND_RECENT_ACTIVITY,
            universe=universe,
            sort_order=sort_order,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ownership/fund/_recent_activity_definition.py
0.889078
0.210036
_recent_activity_definition.py
pypi
from typing import Union, TYPE_CHECKING

from .._enums import StatTypes
from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import ExtendedParams, StrStrings


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """Parameters for fund holdings breakdown statistics.

    Retrieves holdings data broken down by Investors Types, Styles, Region,
    Countries, Rotations and Turnovers.

    Parameters
    ----------
    universe: str, list of str
        The Universe parameter allows the user to define the companies for
        which the content is returned.
    stat_type: int, StatTypes
        The statType parameter specifies which statistics type to be returned.
        The types available are:
        - Investor Type (1)
        - Investment Style (2)
        - Region (3)
        - Rotation (4)
        - Country (5)
        - Metro Area (6)
        - Investor Type Parent (7)
        - Invest Style Parent (8)
    use_field_names_in_headers: bool, optional
        Return field name as column headers for data instead of title
    extended_params : ExtendedParams, optional
        If necessary other parameters.

    Examples
    --------
    >>> from refinitiv.data.content import ownership
    >>> definition = ownership.fund.breakdown.Definition("TRI.N", ownership.StatTypes.INVESTOR_TYPE)
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Ownership.Fund.BreakdownDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        stat_type: Union[int, StatTypes],
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Reject unsupported argument types before issuing any request.
        validate_types(stat_type, [int, StatTypes], "stat_type")
        validate_bool_value(use_field_names_in_headers)
        super().__init__(
            ContentType.OWNERSHIP_FUND_BREAKDOWN,
            universe=try_copy_to_list(universe),
            stat_type=stat_type,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ownership/fund/_breakdown_definition.py
0.871448
0.23169
_breakdown_definition.py
pypi
from typing import TYPE_CHECKING, Optional

from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import ExtendedParams, StrStrings


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """
    This class describe parameters to retrieve the holdings information for
    each fund investor companies.

    Parameters
    ----------
    universe: str, list of str
        The Universe parameter allows the user to define the companies for
        which the content is returned.
    limit: int, optional
        The limit parameter is used for paging. It allows users to select
        the number of records to be returned. Default page size is 100 or 20
        (depending on the operation).
    use_field_names_in_headers: bool, optional
        Return field name as column headers for data instead of title
    extended_params : ExtendedParams, optional
        If necessary other parameters.

    Examples
    --------
    >>> from refinitiv.data.content import ownership
    >>> definition = ownership.fund.holdings.Definition("LP40189339")
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Ownership.Fund.HoldingsDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        limit: Optional[int] = None,
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        validate_types(limit, [int, type(None)], "limit")
        # Consistency fix: all sibling ownership definitions validate this
        # flag; it was missing here.
        validate_bool_value(use_field_names_in_headers)
        universe = try_copy_to_list(universe)
        super().__init__(
            ContentType.OWNERSHIP_FUND_HOLDINGS,
            universe=universe,
            limit=limit,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ownership/fund/_holdings_definition.py
0.907506
0.234768
_holdings_definition.py
pypi
from typing import TYPE_CHECKING, Optional

from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import ExtendedParams, StrStrings


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """Parameters for the fund investors content set.

    Retrieves company ownership details, plus information on the top 20 fund
    shareholders invested in the requested company.

    Parameters
    ----------
    universe: str, list of str
        The Universe parameter allows the user to define the companies for
        which the content is returned.
    limit: int, optional
        The limit parameter is used for paging. It allows users to select
        the number of records to be returned. Default page size is 100 or 20
        (depending on the operation).
    use_field_names_in_headers: bool, optional
        Return field name as column headers for data instead of title
    extended_params : ExtendedParams, optional
        If necessary other parameters.

    Examples
    --------
    >>> from refinitiv.data.content import ownership
    >>> definition = ownership.fund.investors.Definition("TRI.N")
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Ownership.Fund.InvestorsDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        limit: Optional[int] = None,
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Argument validation happens before any request is built.
        validate_types(limit, [int, type(None)], "limit")
        validate_bool_value(use_field_names_in_headers)
        super().__init__(
            ContentType.OWNERSHIP_FUND_INVESTORS,
            universe=try_copy_to_list(universe),
            limit=limit,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ownership/fund/_investors_definition.py
0.886629
0.222383
_investors_definition.py
pypi
from datetime import datetime
from typing import Union, Optional, TYPE_CHECKING

from .._ownership_data_provider import universe_ownership_arg_parser
from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from .._enums import Frequency
    from ...._types import ExtendedParams, OptDateTime

optional_date = Optional[Union[str, datetime]]


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """
    This class describe parameters to retrieve the fund shareholders
    investment information about the requested company, at the specified
    historical period.

    Parameters
    ----------
    universe: str
        The Universe parameter allows the user to define the single company
        for which the content is returned.
    frequency: str, Frequency
        The frequency parameter allows users to request the frequency of the
        time series data, either quarterly or monthly.
        Available values : M, Q
    start: str, datetime, optional
        The start parameter allows users to define the start date of a time
        series. Dates are to be defined either by absolute or relative
        syntax. Example, 20190529, -1Q, 1D, -3MA.
    end: str, datetime, optional
        The end parameter allows users to define the end date of a time
        series. Dates are to be defined either by absolute or relative
        syntax. Example, 20190529, -1Q, 1D, -3MA.
    limit: int, optional
        The limit parameter is used for paging. It allows users to select
        the number of records to be returned.
    use_field_names_in_headers: bool, optional
        Return field name as column headers for data instead of title
    extended_params : ExtendedParams, optional
        If necessary other parameters.

    Examples
    --------
    >>> from refinitiv.data.content import ownership
    >>> definition = ownership.fund.shareholders_history_report.Definition("TRI.N", "M", start="-1Q")
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Ownership.Fund.ShareholdersHistoryReportDefinition"

    def __init__(
        self,
        universe: str,
        frequency: Union[str, "Frequency"],
        start: "OptDateTime" = None,
        end: "OptDateTime" = None,
        limit: Optional[int] = None,
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # This endpoint accepts a single company only, hence the str-only parser.
        universe = universe_ownership_arg_parser.parse(universe)
        validate_types(limit, [int, type(None)], "limit")
        validate_bool_value(use_field_names_in_headers)
        super().__init__(
            ContentType.OWNERSHIP_FUND_SHAREHOLDERS_HISTORY_REPORT,
            universe=universe,
            frequency=frequency,
            start=start,
            end=end,
            limit=limit,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ownership/fund/_shareholders_history_report_definition.py
0.941808
0.335895
_shareholders_history_report_definition.py
pypi
from typing import TYPE_CHECKING, Optional

from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import ExtendedParams, StrStrings


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """Parameters for the latest fund shareholders report.

    Retrieves the latest fund shareholders investment information for the
    requested company.

    Parameters
    ----------
    universe: str, list of str
        The Universe parameter allows the user to define the companies for
        which the content is returned.
    limit: int, optional
        The limit parameter is used for paging. It allows users to select
        the number of records to be returned.
    use_field_names_in_headers: bool, optional
        Return field name as column headers for data instead of title
    extended_params : ExtendedParams, optional
        If necessary other parameters.

    Examples
    --------
    >>> from refinitiv.data.content import ownership
    >>> definition = ownership.fund.shareholders_report.Definition("TRI.N")
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Ownership.Fund.ShareholdersReportDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        limit: Optional[int] = None,
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Validate scalar arguments first, then normalize the universe.
        validate_types(limit, [int, type(None)], "limit")
        validate_bool_value(use_field_names_in_headers)
        super().__init__(
            ContentType.OWNERSHIP_FUND_SHAREHOLDERS_REPORT,
            universe=try_copy_to_list(universe),
            limit=limit,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ownership/fund/_shareholders_report_definition.py
0.902491
0.223292
_shareholders_report_definition.py
pypi
from typing import Union, TYPE_CHECKING

from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import ExtendedParams, StrStrings
    from .._enums import SortOrder


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """Parameters for consolidated recent-activity data.

    Retrieves the latest 5 buy or sell activities for the requested company.

    Parameters
    ----------
    universe: str, list of str
        The Universe parameter allows the user to define the companies for
        which the content is returned.
    sort_order: str, SortOrder
        The sortOrder parameter specifies ascending (asc) or descending
        (desc) Sort Order.
    use_field_names_in_headers: bool, optional
        Return field name as column headers for data instead of title
    extended_params : ExtendedParams, optional
        If necessary other parameters.

    Examples
    --------
    >>> from refinitiv.data.content import ownership
    >>> definition = ownership.consolidated.recent_activity.Definition("TRI.N", "asc")
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Ownership.Consolidated.RecentActivityDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        sort_order: Union[str, "SortOrder"],
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        validate_bool_value(use_field_names_in_headers)
        super().__init__(
            ContentType.OWNERSHIP_CONSOLIDATED_RECENT_ACTIVITY,
            universe=try_copy_to_list(universe),
            sort_order=sort_order,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ownership/consolidated/_recent_activity_definition.py
0.882491
0.208642
_recent_activity_definition.py
pypi
from typing import Union, TYPE_CHECKING

from .._enums import StatTypes
from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import ExtendedParams, StrStrings


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """Parameters for consolidated holdings breakdown statistics.

    Retrieves holdings data broken down by Investor Types, Styles, Region,
    Countries, Rotations and Turnovers.

    Parameters
    ----------
    universe: str, list of str
        The Universe parameter allows the user to define the companies for
        which the content is returned.
    stat_type: int, StatTypes
        The statType parameter specifies which statistics type to be returned.
        The types available are:
        - Investor Type (1)
        - Investment Style (2)
        - Region (3)
        - Rotation (4)
        - Country (5)
        - Metro Area (6)
        - Investor Type Parent (7)
        - Invest Style Parent (8)
    use_field_names_in_headers: bool, optional
        Return field name as column headers for data instead of title
    extended_params: ExtendedParams, optional
        If necessary other parameters.

    Examples
    --------
    >>> from refinitiv.data.content import ownership
    >>> definition = ownership.consolidated.breakdown.Definition("TRI.N", ownership.StatTypes.INVESTOR_TYPE)
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Ownership.Consolidated.BreakdownDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        stat_type: Union[int, StatTypes],
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Fail fast on bad argument types before any request is made.
        validate_types(stat_type, [int, StatTypes], "stat_type")
        validate_bool_value(use_field_names_in_headers)
        super().__init__(
            ContentType.OWNERSHIP_CONSOLIDATED_BREAKDOWN,
            universe=try_copy_to_list(universe),
            stat_type=stat_type,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ownership/consolidated/_breakdown_definition.py
0.871229
0.230389
_breakdown_definition.py
pypi
from typing import Optional, TYPE_CHECKING

from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import ExtendedParams, StrStrings


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """Parameters for consolidated investor concentration data.

    Retrieves the calculated concentration data by all consolidated
    investors.

    Parameters
    ----------
    universe: str, list of str
        The Universe parameter allows the user to define the companies for
        which the content is returned.
    limit: int, optional
        The limit parameter is used for paging. It allows users to select
        the number of records to be returned. Default page size is 100 or 20
        (depending on the operation).
    use_field_names_in_headers: bool, optional
        Return field name as column headers for data instead of title
    extended_params : ExtendedParams, optional
        If necessary other parameters.

    Examples
    --------
    >>> from refinitiv.data.content import ownership
    >>> definition = ownership.consolidated.investors.Definition("TRI.N")
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Ownership.Consolidated.InvestorsDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        limit: Optional[int] = None,
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        validate_types(limit, [int, type(None)], "limit")
        validate_bool_value(use_field_names_in_headers)
        super().__init__(
            ContentType.OWNERSHIP_CONSOLIDATED_INVESTORS,
            universe=try_copy_to_list(universe),
            limit=limit,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ownership/consolidated/_investors_definition.py
0.902094
0.259005
_investors_definition.py
pypi
from datetime import datetime
from typing import Union, Optional, TYPE_CHECKING

from .._ownership_data_provider import universe_ownership_arg_parser
from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from .._enums import Frequency
    from ...._types import ExtendedParams, OptDateTime

# Convenience alias for date-like arguments accepted by this module.
optional_date = Optional[Union[str, datetime]]


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """
    Describes the parameters used to retrieve the report of the total
    consolidated shareholders invested in the requested company, at the
    specified historical period.

    Parameters
    ----------
    universe: str
        The single company for which the content is returned.
    frequency: str, Frequency
        Frequency of the time series data, either quarterly or monthly.
        Available values : M, Q
    start: str, datetime, optional
        Start date of a time series; absolute or relative syntax.
        Example, "20190529", "-1Q", "1D", "-3MA", datetime.datetime.now().
    end: str, datetime, optional
        End date of a time series; absolute or relative syntax.
        Example, "20190529", "-1Q", "1D", "-3MA", datetime.datetime.now().
    limit: int, optional
        Paging parameter: the number of records to be returned.
    use_field_names_in_headers: bool, optional
        Return field names as column headers for data instead of titles.
    extended_params : ExtendedParams, optional
        Other parameters, if necessary.

    Examples
    --------
    >>> from refinitiv.data.content import ownership
    >>> definition = ownership.consolidated.shareholders_history_report.Definition("TRI.N", ownership.Frequency.MONTHLY)
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Ownership.Consolidated.ShareholdersHistoryReportDefinition"

    def __init__(
        self,
        universe: str,
        frequency: Union[str, "Frequency"],
        start: "OptDateTime" = None,
        end: "OptDateTime" = None,
        limit: Optional[int] = None,
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Normalize the universe argument through the ownership-specific parser.
        universe = universe_ownership_arg_parser.parse(universe)
        validate_bool_value(use_field_names_in_headers)
        validate_types(limit, [int, type(None)], "limit")
        super().__init__(
            ContentType.OWNERSHIP_CONSOLIDATED_SHAREHOLDERS_HISTORY_REPORT,
            universe=universe,
            frequency=frequency,
            start=start,
            end=end,
            limit=limit,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ownership/consolidated/_shareholders_history_report_definition.py
0.938287
0.346293
_shareholders_history_report_definition.py
pypi
from typing import TYPE_CHECKING, Optional

from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import ExtendedParams, StrStrings


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """
    Describes the parameters used to retrieve the latest consolidated
    shareholders report for the requested company.

    Parameters
    ----------
    universe: str, list of str
        Company or companies for which the content is returned.
    limit: int, optional
        Paging parameter: the number of records to be returned.
    use_field_names_in_headers: bool, optional
        Return field names as column headers for data instead of titles.
    extended_params : ExtendedParams, optional
        Other parameters, if necessary.

    Examples
    --------
    >>> from refinitiv.data.content import ownership
    >>> definition = ownership.consolidated.shareholders_report.Definition("TRI.N")
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Ownership.Consolidated.ShareholdersReportDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        limit: Optional[int] = None,
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Input validation first; copy the universe so the caller's list stays intact.
        validate_bool_value(use_field_names_in_headers)
        validate_types(limit, [int, type(None)], "limit")
        universe = try_copy_to_list(universe)
        super().__init__(
            ContentType.OWNERSHIP_CONSOLIDATED_SHAREHOLDERS_REPORT,
            universe=universe,
            limit=limit,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ownership/consolidated/_shareholders_report_definition.py
0.898525
0.212743
_shareholders_report_definition.py
pypi
from typing import TYPE_CHECKING, Optional

from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import ExtendedParams, StrStrings


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """
    Describes the parameters used to retrieve investor name, count and
    holdings data for any requested investor.

    Parameters
    ----------
    universe: str, list of str
        Company or companies for which the content is returned.
    limit: int, optional
        Paging parameter: the number of records to be returned.
        Default page size is 100 or 20 (depending on the operation).
    use_field_names_in_headers: bool, optional
        Return field names as column headers for data instead of titles.
    extended_params : ExtendedParams, optional
        Other parameters, if necessary.

    Examples
    --------
    >>> from refinitiv.data.content import ownership
    >>> definition = ownership.investor.holdings.Definition("TRI.N")
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Ownership.Investor.HoldingsDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        limit: Optional[int] = None,
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Validate first, then defensively copy the caller's universe list.
        validate_bool_value(use_field_names_in_headers)
        validate_types(limit, [int, type(None)], "limit")
        universe = try_copy_to_list(universe)
        super().__init__(
            ContentType.OWNERSHIP_INVESTOR_HOLDINGS,
            universe=universe,
            limit=limit,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ownership/investor/_holdings_definition.py
0.896611
0.243991
_holdings_definition.py
pypi
from typing import Optional, TYPE_CHECKING

from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import ExtendedParams, OptDateTime, StrStrings


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """
    Describes the parameters used to retrieve details on stakeholders and
    strategic entities transactions that purchased the requested instruments.
    Further details of insider stakeholder can be requested along with their
    holding details.

    The operation supports pagination, however, it is dependent on user
    entitlements. Maximum 'count' value per page is 100. The default date
    range is 20 transactions, unless the 'start date' and 'end date' define a
    smaller range. The count value is checked by the service to determine if
    it does not exceed a specific number. If it does, the service will
    overwrite the client value to service default value.

    Parameters
    ----------
    universe: str, list of str
        The single company for which the content is returned.
    start: str, datetime, optional
        Start date of a time series; absolute or relative syntax.
        Example, 20190529, -1Q, 1D, -3MA.
    end: str, datetime, optional
        End date of a time series; absolute or relative syntax.
        Example, 20190529, -1Q, 1D, -3MA.
    limit: int, optional
        Paging parameter: the number of records to be returned.
    use_field_names_in_headers: bool, optional
        Return field names as column headers for data instead of titles.
    extended_params : ExtendedParams, optional
        Other parameters, if necessary.

    Examples
    --------
    >>> from refinitiv.data.content import ownership
    >>> definition = ownership.insider.transaction_report.Definition("TRI.N", start="-1Q")
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Ownership.Insider.TransactionReportDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        start: "OptDateTime" = None,
        end: "OptDateTime" = None,
        limit: Optional[int] = None,
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Argument checks, then a defensive copy of the universe list.
        validate_bool_value(use_field_names_in_headers)
        validate_types(limit, [int, type(None)], "limit")
        universe = try_copy_to_list(universe)
        super().__init__(
            ContentType.OWNERSHIP_INSIDER_TRANSACTION_REPORT,
            universe=universe,
            start=start,
            end=end,
            limit=limit,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ownership/insider/_transaction_report_definition.py
0.9249
0.318671
_transaction_report_definition.py
pypi
from typing import TYPE_CHECKING, Optional

from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import ExtendedParams, StrStrings


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """
    Describes the parameters used to retrieve the insider
    shareholders/declarable stakes investment information in the requested
    company at the specified historical period.

    Parameters
    ----------
    universe: str, list of str
        Company or companies for which the content is returned.
    limit: int, optional
        Paging parameter: the number of records to be returned.
    use_field_names_in_headers: bool, optional
        Return field names as column headers for data instead of titles.
    extended_params : ExtendedParams, optional
        Other parameters, if necessary.

    Examples
    --------
    >>> from refinitiv.data.content import ownership
    >>> definition = ownership.insider.shareholders_report.Definition("TRI.N")
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Ownership.Insider.ShareholdersReportDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        limit: Optional[int] = None,
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Validate inputs up front; copy the universe so callers keep their list.
        validate_bool_value(use_field_names_in_headers)
        validate_types(limit, [int, type(None)], "limit")
        universe = try_copy_to_list(universe)
        super().__init__(
            ContentType.OWNERSHIP_INSIDER_SHAREHOLDERS_REPORT,
            universe=universe,
            limit=limit,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/ownership/insider/_shareholders_report_definition.py
0.90407
0.237289
_shareholders_report_definition.py
pypi
from typing import TYPE_CHECKING

from .._content_data import Data
from .._content_provider_layer import ContentUsageLoggerMixin
from ..._content_type import ContentType
from ..._tools import hp_universe_parser, validate_types, try_copy_to_list, hp_datetime_adapter
from ...delivery._data._data_provider import BaseResponse, DataProviderLayer

if TYPE_CHECKING:
    from ._enums import OptAdjustments, OptEventTypes
    from ..._types import OptDateTime, StrStrings, OptInt, ExtendedParams, OptStrStrs


class Definition(ContentUsageLoggerMixin[BaseResponse[Data]], DataProviderLayer[BaseResponse[Data]]):
    """
    Defines the Historical Pricing Events to be retrieved.

    Parameters
    ----------
    universe : str or list of str
        Single instrument or list of instruments.
    eventTypes : list of EventTypes or EventTypes or str, optional
        Single market event or list of events.
    start : str or date or datetime or timedelta, optional
        Start time for the events query.
    end : str or date or datetime or timedelta, optional
        End time for the events query.
    adjustments : list of Adjustments or Adjustments or str, optional
        Single adjustment type or list of adjustment types to apply CORAX
        (Corporate Actions) events or exchange/manual corrections to the
        historical time series data.
    count : int, optional
        The maximum number of rows to return.
    fields : list, optional
        List of fields to return.
    extended_params : dict, optional
        Additional parameters to apply to the request.

    Examples
    --------
    >>> from refinitiv.data.content.historical_pricing import events
    >>> definition_events = events.Definition("EUR")
    >>> response = definition_events.get_data()
    """

    _USAGE_CLS_NAME = "HistoricalPricing.EventsDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        eventTypes: "OptEventTypes" = None,
        start: "OptDateTime" = None,
        end: "OptDateTime" = None,
        adjustments: "OptAdjustments" = None,
        count: "OptInt" = None,
        fields: "OptStrStrs" = None,
        extended_params: "ExtendedParams" = None,
    ):
        # Localize date-like boundaries before they reach the provider layer.
        start = hp_datetime_adapter.get_localize(start)
        end = hp_datetime_adapter.get_localize(end)
        validate_types(count, [int, type(None)], "count")
        # Copy list arguments defensively, then normalize the universe.
        universe = hp_universe_parser.get_list(try_copy_to_list(universe))
        event_types = try_copy_to_list(eventTypes)
        adjustments = try_copy_to_list(adjustments)
        fields = try_copy_to_list(fields)
        super().__init__(
            data_type=ContentType.HISTORICAL_PRICING_EVENTS,
            universe=universe,
            event_types=event_types,
            start=start,
            end=end,
            adjustments=adjustments,
            count=count,
            fields=fields,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/historical_pricing/_events_definition.py
0.873916
0.252574
_events_definition.py
pypi
from functools import partial
from typing import Union

from ._enums import (
    adjustments_arg_parser,
    market_sessions_arg_parser,
    event_types_arg_parser,
)
from ..._content_type import ContentType
from .._historical_data_provider import get_fields_summaries, get_fields_events
from .._intervals import (
    DayIntervalType,
    Intervals,
    get_day_interval_type,
    interval_arg_parser,
)
from ..._tools import (
    urljoin,
    ParamItem,
    ValueParamItem,
    is_date_true,
)
from ..._tools._datetime import hp_datetime_adapter
from ...delivery._data._data_provider import RequestFactory

# Maps the day-interval granularity onto the matching summaries content type.
content_type_by_day_interval_type = {
    DayIntervalType.INTER: ContentType.HISTORICAL_PRICING_INTERDAY_SUMMARIES,
    DayIntervalType.INTRA: ContentType.HISTORICAL_PRICING_INTRADAY_SUMMARIES,
}


def get_content_type_by_interval(interval: Union[str, Intervals, DayIntervalType]) -> ContentType:
    """Resolve the content type for *interval* via its day-interval granularity."""
    return content_type_by_day_interval_type.get(get_day_interval_type(interval))


def check_count(value):
    """Validate a count query parameter; counts below 1 are rejected."""
    if value is not None and value < 1:
        raise ValueError("Count minimum value is 1")
    return value


# Query-parameter descriptors for the summaries endpoint.
hp_summaries_query_params = [
    ValueParamItem("interval", function=interval_arg_parser.get_str),
    ValueParamItem("start", function=hp_datetime_adapter.get_str, is_true=is_date_true),
    ValueParamItem("end", function=hp_datetime_adapter.get_str, is_true=is_date_true),
    ValueParamItem("adjustments", function=partial(adjustments_arg_parser.get_str, delim=",")),
    ValueParamItem("sessions", function=partial(market_sessions_arg_parser.get_str, delim=",")),
    ValueParamItem("count", function=check_count),
    ParamItem("fields", function=get_fields_summaries),
]

# Query-parameter descriptors for the events endpoint ("event_types" is sent
# on the wire as "eventTypes").
hp_events_query_params = [
    ValueParamItem("interval", function=interval_arg_parser.get_str),
    ValueParamItem("event_types", "eventTypes", partial(event_types_arg_parser.get_str, delim=",")),
    ValueParamItem("start", function=hp_datetime_adapter.get_str, is_true=is_date_true),
    ValueParamItem("end", function=hp_datetime_adapter.get_str, is_true=is_date_true),
    ValueParamItem("adjustments", function=partial(adjustments_arg_parser.get_str, delim=",")),
    ValueParamItem("count", function=check_count),
    ParamItem("fields", function=get_fields_events),
]


class HistoricalPricingRequestFactory(RequestFactory):
    """Builds historical-pricing requests: URL template, path and query params."""

    def get_url(self, *args, **kwargs):
        # args[1] is the base endpoint URL; the instrument is a path segment.
        return urljoin(args[1], "/{universe}")

    def get_path_parameters(self, session=None, *, universe=None, **kwargs):
        # No universe -> no path substitution.
        return {} if universe is None else {"universe": universe}

    def extend_body_parameters(self, body_parameters, extended_params=None, **kwargs):
        # Historical pricing requests carry no body.
        return None

    def extend_query_parameters(self, query_parameters, extended_params=None):
        if extended_params:
            query_parameters = dict(query_parameters)
            query_parameters.update(extended_params)
            # Re-serialize any caller-overridden date bounds.
            for key in ("start", "end"):
                if key in extended_params:
                    arg_date = query_parameters[key]
                    query_parameters[key] = hp_datetime_adapter.get_str(arg_date)
            query_parameters = list(query_parameters.items())
        return query_parameters


class HistoricalPricingEventsRequestFactory(HistoricalPricingRequestFactory):
    """Request factory wired to the events query-parameter config."""

    @property
    def query_params_config(self):
        return hp_events_query_params


class HistoricalPricingSummariesRequestFactory(HistoricalPricingRequestFactory):
    """Request factory wired to the summaries query-parameter config."""

    @property
    def query_params_config(self):
        return hp_summaries_query_params
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/historical_pricing/_historical_pricing_request_factory.py
0.708213
0.177205
_historical_pricing_request_factory.py
pypi
from enum import unique
from typing import Union, List, Optional

from ..._tools import make_enum_arg_parser
from ..._base_enum import StrEnum

# --------------------------------------------------------------------------------------
# EventTypes
# --------------------------------------------------------------------------------------


@unique
class EventTypes(StrEnum):
    """Market events; the supported event types are trade, quote and correction."""

    TRADE = "trade"
    QUOTE = "quote"
    CORRECTION = "correction"


OptEventTypes = Optional[Union[str, List[str], EventTypes, List[EventTypes]]]
event_types_arg_parser = make_enum_arg_parser(EventTypes)


# --------------------------------------------------------------------------------------
# Adjustments
# --------------------------------------------------------------------------------------


@unique
class Adjustments(StrEnum):
    """
    Adjustment types (comma delimiter) that tell the system whether to apply
    or not apply CORAX (Corporate Actions) events or exchange/manual
    corrections to historical time series data.

    The supported values of adjustments:
        UNADJUSTED - Not apply both exchange/manual corrections and CORAX
        EXCHANGE_CORRECTION - Apply exchange correction adjustment to historical pricing
        MANUAL_CORRECTION - Apply manual correction adjustment to historical pricing,
            i.e. annotations made by content analysts
        CCH - Apply Capital Change adjustment to historical Pricing due to
            Corporate Actions, e.g. stock split
        CRE - Apply Currency Redenomination adjustment when there is
            redenomination of currency
        RPO - Apply Reuters Price Only adjustment to adjust historical price
            only, not volume
        RTS - Apply Reuters TimeSeries adjustment to adjust both historical
            price and volume
        QUALIFIERS - Apply price or volume adjustment to historical pricing
            according to trade/quote qualifier summarization actions
    """

    UNADJUSTED = "unadjusted"
    EXCHANGE_CORRECTION = "exchangeCorrection"
    MANUAL_CORRECTION = "manualCorrection"
    CCH = "CCH"
    CRE = "CRE"
    RPO = "RPO"
    RTS = "RTS"
    QUALIFIERS = "qualifiers"


OptAdjustments = Optional[Union[str, List[str], Adjustments, List[Adjustments]]]
adjustments_arg_parser = make_enum_arg_parser(Adjustments)


# --------------------------------------------------------------------------------------
# MarketSession
# --------------------------------------------------------------------------------------


@unique
class MarketSession(StrEnum):
    """
    Official durations of interest in which trade and quote activities occur
    for a particular instrument.
    """

    PRE = "pre"
    NORMAL = "normal"
    POST = "post"


OptMarketSession = Optional[Union[str, List[str], MarketSession, List[MarketSession]]]
market_sessions_arg_parser = make_enum_arg_parser(MarketSession)
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/historical_pricing/_enums.py
0.885024
0.298926
_enums.py
pypi
from typing import Union, TYPE_CHECKING

from .._content_data import Data
from .._intervals import DayIntervalType, Intervals, get_day_interval_type
from ..._tools import hp_universe_parser, validate_types, try_copy_to_list, hp_datetime_adapter
from ...delivery._data._data_provider import BaseResponse, DataProviderLayer
from ._historical_pricing_request_factory import get_content_type_by_interval

if TYPE_CHECKING:
    from ._enums import OptAdjustments, OptMarketSession
    # Fix: the original imported StrStrings twice; `fields` is optional, so it
    # uses OptStrStrs (consistent with the sibling events definition).
    from ..._types import (
        OptInt,
        ExtendedParams,
        OptDateTime,
        StrStrings,
        OptStrStrs,
    )


class Definition(DataProviderLayer[BaseResponse[Data]]):
    """
    Creates a definition containing a summary of the specified historical
    pricing events.

    Parameters
    ----------
    universe : str or list of str
        Single instrument or list of instruments.
    interval : str or Intervals, optional
        Predefined interval for filtering historical pricing events.
    start : str or date or datetime or timedelta, optional
        Start time for the events query.
    end : str or date or datetime or timedelta, optional
        End time for the events query.
    adjustments : list of Adjustments or Adjustments or str, optional
        Single adjustment type or list of adjustment types to apply CORAX
        (Corporate Actions) events or exchange/manual corrections to the
        historical time series data.
    sessions : list of MarketSession or MarketSession or str, optional
        Market session durations, such as pre-market session, normal market
        session and post-market session.
    count : int, optional
        The maximum number of rows to return.
    fields : list, optional
        The list of fields to return.
    extended_params : dict, optional
        Additional parameters to apply to the request.

    Examples
    --------
    >>> from refinitiv.data.content.historical_pricing import summaries
    >>> definition_summaries = summaries.Definition("EUR")
    >>> response = definition_summaries.get_data()
    """

    def __init__(
        self,
        universe: "StrStrings",
        interval: Union[str, Intervals] = None,
        start: "OptDateTime" = None,
        end: "OptDateTime" = None,
        adjustments: "OptAdjustments" = None,
        sessions: "OptMarketSession" = None,
        count: "OptInt" = None,
        fields: "OptStrStrs" = None,
        extended_params: "ExtendedParams" = None,
    ) -> None:
        # Localize date-like boundaries before they reach the provider layer.
        start = hp_datetime_adapter.get_localize(start)
        end = hp_datetime_adapter.get_localize(end)

        # By default, if interval is not defined, interday default value is requested
        day_interval_type = get_day_interval_type(interval or DayIntervalType.INTER)
        content_type = get_content_type_by_interval(day_interval_type)

        validate_types(count, [int, type(None)], "count")

        # Copy list arguments defensively, then normalize the universe.
        universe = try_copy_to_list(universe)
        universe = hp_universe_parser.get_list(universe)
        adjustments = try_copy_to_list(adjustments)
        sessions = try_copy_to_list(sessions)
        fields = try_copy_to_list(fields)

        super().__init__(
            data_type=content_type,
            universe=universe,
            interval=interval,
            start=start,
            end=end,
            adjustments=adjustments,
            sessions=sessions,
            count=count,
            fields=fields,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/historical_pricing/_summaries_definition.py
0.917838
0.291359
_summaries_definition.py
pypi
from typing import TYPE_CHECKING, Union

from .._content_data import Data
from ..._tools import create_repr
from ...delivery._data._data_provider import DataProviderLayer, BaseResponse
from ..._content_type import ContentType

if TYPE_CHECKING:
    from ._views import Views
    from ..._types import ExtendedParams


class Definition(DataProviderLayer[BaseResponse[Data]]):
    """
    Describes the parameters used to retrieve data for search lookup.

    Parameters
    ----------
    view : str or Views
        Picks a subset of the data universe to search against. See Views.
    terms : str
        Lists the symbols to be solved.
    scope : str
        Identifies the symbology which 'terms' belong to.
    select : str
        Specifies which properties to return for each result doc.
    extended_params : dict, optional
        Other parameters can be provided if necessary.

    Examples
    --------
    >>> from refinitiv.data.content import search
    >>> definition = search.lookup.Definition(
    >>>     view=search.Views.SEARCH_ALL,
    >>>     scope="RIC",
    >>>     terms="A,B,NOSUCHRIC,C,D",
    >>>     select="BusinessEntity,DocumentTitle"
    >>>)
    """

    def __init__(
        self,
        view: Union["Views", str],
        terms: str,
        scope: str,
        select: str,
        extended_params: "ExtendedParams" = None,
    ):
        # Kept on the instance for __repr__.
        self._view = view
        self._terms = terms
        self._scope = scope
        self._select = select
        self._extended_params = extended_params
        super().__init__(
            data_type=ContentType.DISCOVERY_LOOKUP,
            view=view,
            terms=terms,
            scope=scope,
            select=select,
            extended_params=extended_params,
        )

    def __repr__(self):
        return create_repr(
            self,
            middle_path="lookup",
            content=f"{{view='{self._view}', terms='{self._terms}', scope='{self._scope}', select='{self._select}'}}",
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/search/_lookup_definition.py
0.911431
0.321234
_lookup_definition.py
pypi
from typing import TYPE_CHECKING, Union

from ._data_provider import SearchData
from ._views import Views
from ..._tools import create_repr, validate_types
from ...delivery._data._data_provider import DataProviderLayer, BaseResponse
from ..._content_type import ContentType

if TYPE_CHECKING:
    from ..._types import ExtendedParams


class Definition(DataProviderLayer[BaseResponse[SearchData]]):
    """
    Describes the parameters used to retrieve data for search.

    Parameters
    ----------
    query: str, optional
        Keyword argument for view.
    view: Views or str, optional
        The view for searching, see the Views enum.
        Default: Views.SEARCH_ALL
    filter: str, optional
        Where query is for unstructured end-user-oriented restriction, filter
        is for structured programmatic restriction.
    order_by: str, optional
        Defines the order in which matching documents should be returned.
    boost: str, optional
        This argument supports exactly the same predicate expression syntax as
        filter, but where filter restricts which documents are matched at all,
        boost just applies a large scoring boost to documents it matches,
        which will almost always guarantee that they appear at the top of the
        results.
    select: str, optional
        A comma-separated list of the properties of a document to be returned
        in the response.
    top: int, optional
        The maximum number of documents to retrieve. Must be non-negative.
        Default: 10
    skip: int, optional
        The number of documents to skip in the sorted result set before
        returning the next top.
    group_by: str, optional
        If specified, this must name a single Groupable property. Returned
        documents are grouped into buckets based on their value for this
        property.
    group_count: str, optional
        When supplied in combination with group_by, sets the maximum number of
        documents to be returned per bucket.
        Default: 3
    navigators: str, optional
        This can name one or more properties, separated by commas, each of
        which must be Navigable.
    extended_params : dict, optional
        Other parameters can be provided if necessary.

    Examples
    --------
    >>> from refinitiv.data.content import search
    >>> definition = search.Definition(query="cfo", view=search.Views.PEOPLE)
    """

    def __init__(
        self,
        query: str = None,
        view: Union[Views, str] = Views.SEARCH_ALL,
        filter: str = None,
        order_by: str = None,
        boost: str = None,
        select: str = None,
        top: int = 10,
        skip: int = 0,
        group_by: str = None,
        group_count: int = 3,
        navigators: str = None,
        features: str = None,
        scope: str = None,
        terms: str = None,
        extended_params: "ExtendedParams" = None,
    ):
        # Integer-typed paging arguments must actually be ints.
        validate_types(top, [int], "top")
        validate_types(skip, [int], "skip")
        validate_types(group_count, [int], "group_count")

        # Kept on the instance for __repr__ and introspection.
        self._query = query
        self._view = view
        self._boost = boost
        self._features = features
        self._filter = filter
        self._group_by = group_by
        self._group_count = group_count
        self._navigators = navigators
        self._order_by = order_by
        self._scope = scope
        self._select = select
        self._skip = skip
        self._terms = terms
        self._top = top
        self._extended_params = extended_params

        super().__init__(
            data_type=ContentType.DISCOVERY_SEARCH,
            query=query,
            view=view,
            filter=filter,
            order_by=order_by,
            boost=boost,
            select=select,
            top=top,
            skip=skip,
            group_by=group_by,
            group_count=group_count,
            navigators=navigators,
            features=features,
            scope=scope,
            terms=terms,
            extended_params=extended_params,
        )

    def __repr__(self):
        return create_repr(self, content=f"{{query='{self._query}'}}")
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/search/_definition.py
0.938527
0.441372
_definition.py
pypi
from typing import Union, TYPE_CHECKING

from .._enums import Package
from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import StrStrings, ExtendedParams


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """
    Describes the parameters used to retrieve the estimates summary for all
    non-periodic estimates measures.

    Parameters
    ----------
    universe: str, list of str
        Single instrument or list of instruments.
    package: str, Package
        Packages of the content that are subsets in terms of breadth (number
        of fields) and depth (amount of history) of the overall content set.
    use_field_names_in_headers: bool, optional
        Indicates whether or not to display field names in the headers.
    extended_params: ExtendedParams, optional
        Specifies the parameters that will be merged with the request.

    Examples
    --------
    >>> from refinitiv.data.content import estimates
    >>> definition = estimates.view_summary.non_periodic_measures.Definition(universe="IBM.N", package=estimates.Package.BASIC)
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Estimates.Summary.NonPeriodicMeasuresDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        package: Union[str, Package],
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Validate inputs first; copy the universe so the caller's list stays intact.
        validate_bool_value(use_field_names_in_headers)
        validate_types(package, [str, Package], "package")
        universe = try_copy_to_list(universe)
        super().__init__(
            ContentType.ESTIMATES_VIEW_SUMMARY_NON_PERIODIC_MEASURES,
            universe=universe,
            package=package,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/estimates/view_summary/_non_periodic_measures_definition.py
0.915764
0.221119
_non_periodic_measures_definition.py
pypi
from typing import Union, TYPE_CHECKING

from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from .._enums import Package
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import StrStrings, ExtendedParams


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """
    Defines a request for the recommendation summary.

    Parameters
    ----------
    universe: str, list of str
        Single instrument or list of instruments.
    package: str, Package
        Package of the content, i.e. a subset in terms of breadth (number
        of fields) and depth (amount of history) of the overall content set.
    use_field_names_in_headers: bool, optional
        Whether to display field names in the headers.
    extended_params: ExtendedParams, optional
        Parameters that will be merged with the request.

    Examples
    --------
    >>> from refinitiv.data.content import estimates
    >>> definition = estimates.view_summary.recommendations.Definition(universe="IBM.N", package=estimates.Package.BASIC)
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Estimates.Summary.RecommendationsDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        package: Union[str, Package],
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Fail fast on invalid argument types before the request is built.
        validate_types(package, [str, Package], "package")
        validate_bool_value(use_field_names_in_headers)
        super().__init__(
            ContentType.ESTIMATES_VIEW_SUMMARY_RECOMMENDATIONS,
            # Defensive copy so a caller-owned list is never mutated.
            universe=try_copy_to_list(universe),
            package=package,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/estimates/view_summary/_recommendations_definition.py
0.900234
0.213295
_recommendations_definition.py
pypi
from typing import Union, TYPE_CHECKING

from .._enums import Package
from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import StrStrings, ExtendedParams


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """
    Defines a request for the estimated summary values of the reported
    annual periods.

    Parameters
    ----------
    universe: str, list of str
        Single instrument or list of instruments.
    package: str, Package
        Package of the content, i.e. a subset in terms of breadth (number
        of fields) and depth (amount of history) of the overall content set.
    use_field_names_in_headers: bool, optional
        Whether to display field names in the headers.
    extended_params: ExtendedParams, optional
        Parameters that will be merged with the request.

    Examples
    --------
    >>> from refinitiv.data.content import estimates
    >>> definition = estimates.view_summary.annual.Definition(universe="IBM.N", package=estimates.Package.BASIC)
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Estimates.Summary.AnnualDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        package: Union[str, Package],
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Fail fast on invalid argument types before the request is built.
        validate_types(package, [str, Package], "package")
        validate_bool_value(use_field_names_in_headers)
        super().__init__(
            ContentType.ESTIMATES_VIEW_SUMMARY_ANNUAL,
            # Defensive copy so a caller-owned list is never mutated.
            universe=try_copy_to_list(universe),
            package=package,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/estimates/view_summary/_annual_definition.py
0.903768
0.237808
_annual_definition.py
pypi
from typing import Union, TYPE_CHECKING

from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from .._enums import Package
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import StrStrings, ExtendedParams


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """
    Defines a request for the monthly historical snapshot values of the
    non-periodic estimate measures over the last 12 months.

    Parameters
    ----------
    universe: str, list of str
        Single instrument or list of instruments.
    package: str, Package
        Package of the content, i.e. a subset in terms of breadth (number
        of fields) and depth (amount of history) of the overall content set.
    use_field_names_in_headers: bool, optional
        Whether to display field names in the headers.
    extended_params: ExtendedParams, optional
        Parameters that will be merged with the request.

    Examples
    --------
    >>> from refinitiv.data.content import estimates
    >>> definition = estimates.view_summary.historical_snapshots_non_periodic_measures.Definition(universe="IBM.N", package=estimates.Package.BASIC)
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Estimates.Summary.HistoricalSnapshotsNonPeriodicMeasuresDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        package: Union[str, Package],
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Fail fast on invalid argument types before the request is built.
        validate_types(package, [str, Package], "package")
        validate_bool_value(use_field_names_in_headers)
        super().__init__(
            ContentType.ESTIMATES_VIEW_SUMMARY_HISTORICAL_SNAPSHOTS_NON_PERIODIC_MEASURES,
            # Defensive copy so a caller-owned list is never mutated.
            universe=try_copy_to_list(universe),
            package=package,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/estimates/view_summary/_historical_snapshots_non_periodic_measures_definition.py
0.915493
0.22933
_historical_snapshots_non_periodic_measures_definition.py
pypi
from typing import Union, TYPE_CHECKING

from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from .._enums import Package
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import StrStrings, ExtendedParams


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """
    Defines a request for the estimated summary values of the reported
    interim periods.

    Parameters
    ----------
    universe: str, list of str
        Single instrument or list of instruments.
    package: str, Package
        Package of the content, i.e. a subset in terms of breadth (number
        of fields) and depth (amount of history) of the overall content set.
    use_field_names_in_headers: bool, optional
        Whether to display field names in the headers.
    extended_params: ExtendedParams, optional
        Parameters that will be merged with the request.

    Examples
    --------
    >>> from refinitiv.data.content import estimates
    >>> definition = estimates.view_summary.interim.Definition(universe="IBM.N", package=estimates.Package.BASIC)
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Estimates.Summary.InterimDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        package: Union[str, Package],
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Fail fast on invalid argument types before the request is built.
        validate_types(package, [str, Package], "package")
        validate_bool_value(use_field_names_in_headers)
        super().__init__(
            ContentType.ESTIMATES_VIEW_SUMMARY_INTERIM,
            # Defensive copy so a caller-owned list is never mutated.
            universe=try_copy_to_list(universe),
            package=package,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/estimates/view_summary/_interim_definition.py
0.86778
0.23219
_interim_definition.py
pypi
from typing import Union, TYPE_CHECKING

from .._enums import Package
from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import ExtendedParams, StrStrings


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """
    Defines a request for the monthly historical snapshot values of the
    last 12 months for all annual period estimate measures.

    Parameters
    ----------
    universe: str, list of str
        Single instrument or list of instruments.
    package: str, Package
        Package of the content, i.e. a subset in terms of breadth (number
        of fields) and depth (amount of history) of the overall content set.
    use_field_names_in_headers: bool, optional
        Whether to display field names in the headers.
    extended_params: ExtendedParams, optional
        Parameters that will be merged with the request.

    Examples
    --------
    >>> from refinitiv.data.content import estimates
    >>> definition = estimates.view_summary.historical_snapshots_periodic_measures_annual.Definition(universe="IBM.N", package=estimates.Package.BASIC)
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Estimates.Summary.HistoricalSnapshotsPeriodicMeasuresAnnualDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        package: Union[str, Package],
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Fail fast on invalid argument types before the request is built.
        validate_types(package, [str, Package], "package")
        validate_bool_value(use_field_names_in_headers)
        super().__init__(
            ContentType.ESTIMATES_VIEW_SUMMARY_HISTORICAL_SNAPSHOTS_PERIODIC_MEASURES_ANNUAL,
            # Defensive copy so a caller-owned list is never mutated.
            universe=try_copy_to_list(universe),
            package=package,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/estimates/view_summary/_historical_snapshots_periodic_measures_annual_definition.py
0.918777
0.244848
_historical_snapshots_periodic_measures_annual_definition.py
pypi
from typing import Union, TYPE_CHECKING

from .._enums import Package
from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import StrStrings, ExtendedParams


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """
    Defines a request for the monthly historical snapshot values of the
    last 12 months for all interim period estimate measures.

    Parameters
    ----------
    universe: str, list of str
        Single instrument or list of instruments.
    package: str, Package
        Package of the content, i.e. a subset in terms of breadth (number
        of fields) and depth (amount of history) of the overall content set.
    use_field_names_in_headers: bool, optional
        Whether to display field names in the headers.
    extended_params: ExtendedParams, optional
        Parameters that will be merged with the request.

    Examples
    --------
    >>> from refinitiv.data.content import estimates
    >>> definition = estimates.view_summary.historical_snapshots_periodic_measures_interim.Definition(universe="IBM.N", package=estimates.Package.BASIC)
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Estimates.Summary.HistoricalSnapshotsPeriodicMeasuresInterimDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        package: Union[str, Package],
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Fail fast on invalid argument types before the request is built.
        validate_types(package, [str, Package], "package")
        validate_bool_value(use_field_names_in_headers)
        super().__init__(
            ContentType.ESTIMATES_VIEW_SUMMARY_HISTORICAL_SNAPSHOTS_PERIODIC_MEASURES_INTERIM,
            # Defensive copy so a caller-owned list is never mutated.
            universe=try_copy_to_list(universe),
            package=package,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/estimates/view_summary/_historical_snapshots_periodic_measures_interim_definition.py
0.920115
0.226741
_historical_snapshots_periodic_measures_interim_definition.py
pypi
from typing import Union, TYPE_CHECKING

from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from .._enums import Package
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import StrStrings, ExtendedParams


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """
    Defines a request for the monthly historical snapshot values of the
    recommendations over the last 12 months.

    Parameters
    ----------
    universe: str, list of str
        Single instrument or list of instruments.
    package: str, Package
        Package of the content, i.e. a subset in terms of breadth (number
        of fields) and depth (amount of history) of the overall content set.
    use_field_names_in_headers: bool, optional
        Whether to display field names in the headers.
    extended_params: ExtendedParams, optional
        Parameters that will be merged with the request.

    Examples
    --------
    >>> from refinitiv.data.content import estimates
    >>> definition = estimates.view_summary.historical_snapshots_recommendations.Definition(universe="IBM.N", package=estimates.Package.BASIC)
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Estimates.Summary.HistoricalSnapshotsRecommendationsDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        package: Union[str, Package],
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Fail fast on invalid argument types before the request is built.
        validate_types(package, [str, Package], "package")
        validate_bool_value(use_field_names_in_headers)
        super().__init__(
            ContentType.ESTIMATES_VIEW_SUMMARY_HISTORICAL_SNAPSHOTS_RECOMMENDATIONS,
            # Defensive copy so a caller-owned list is never mutated.
            universe=try_copy_to_list(universe),
            package=package,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/estimates/view_summary/_historical_snapshots_recommendations_definition.py
0.911346
0.221077
_historical_snapshots_recommendations_definition.py
pypi
from typing import Union, TYPE_CHECKING

from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from .._enums import Package
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import StrStrings, ExtendedParams


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """
    Defines a request for the estimated actuals values of the reported
    annual periods.

    Parameters
    ----------
    universe: str, list of str
        Single instrument or list of instruments.
    package: str, Package
        Package of the content, i.e. a subset in terms of breadth (number
        of fields) and depth (amount of history) of the overall content set.
    use_field_names_in_headers: bool, optional
        Whether to display field names in the headers.
    extended_params: ExtendedParams, optional
        Parameters that will be merged with the request.

    Examples
    --------
    >>> from refinitiv.data.content import estimates
    >>> definition = estimates.view_actuals.annual.Definition(
    ...     universe="IBM.N",
    ...     package=estimates.Package.BASIC
    ... )
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Estimates.Actuals.AnnualDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        package: Union[str, Package],
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Fail fast on invalid argument types before the request is built.
        validate_types(package, [str, Package], "package")
        validate_bool_value(use_field_names_in_headers)
        super().__init__(
            ContentType.ESTIMATES_VIEW_ACTUALS_ANNUAL,
            # Defensive copy so a caller-owned list is never mutated.
            universe=try_copy_to_list(universe),
            package=package,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/estimates/view_actuals/_annual_definition.py
0.907851
0.243575
_annual_definition.py
pypi
from typing import Union, TYPE_CHECKING

from ..._content_data import Data
from ..._content_provider_layer import ContentUsageLoggerMixin
from .._enums import Package
from ...._content_type import ContentType
from ...._tools import validate_types, validate_bool_value, try_copy_to_list
from ....delivery._data._data_provider import DataProviderLayer, BaseResponse

if TYPE_CHECKING:
    from ...._types import StrStrings, ExtendedParams


class Definition(
    ContentUsageLoggerMixin[BaseResponse[Data]],
    DataProviderLayer[BaseResponse[Data]],
):
    """
    Describes the parameters used to retrieve the estimated actuals values for the
    reported interim periods.

    Parameters
    ----------
    universe: str, list of str
        Single instrument or list of instruments.
    package: str, Package
        Packages of the content that are subsets in terms of breadth (number of
        fields) and depth (amount of history) of the overall content set.
    use_field_names_in_headers: bool, optional
        Boolean that indicates whether or not to display field names in the headers.
    extended_params: ExtendedParams, optional
        Specifies the parameters that will be merged with the request.

    Examples
    --------
    >>> from refinitiv.data.content import estimates
    >>> definition = estimates.view_actuals.interim.Definition(universe="IBM.N", package=estimates.Package.BASIC)
    >>> response = definition.get_data()
    """

    _USAGE_CLS_NAME = "Estimates.Actuals.InterimDefinition"

    def __init__(
        self,
        universe: "StrStrings",
        package: Union[str, Package],
        use_field_names_in_headers: bool = False,
        extended_params: "ExtendedParams" = None,
    ):
        # Validate argument types up front so misuse fails before any request.
        validate_types(package, [str, Package], "package")
        validate_bool_value(use_field_names_in_headers)
        # Copy to a list so a caller-owned sequence is never mutated downstream.
        universe = try_copy_to_list(universe)
        super().__init__(
            ContentType.ESTIMATES_VIEW_ACTUALS_INTERIM,
            universe=universe,
            package=package,
            use_field_names_in_headers=use_field_names_in_headers,
            extended_params=extended_params,
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/estimates/view_actuals/_interim_definition.py
0.873309
0.246454
_interim_definition.py
pypi
from typing import TYPE_CHECKING, Callable, List, Union

import pandas as pd

from ..._tools._dataframe import (
    convert_dtypes,
    convert_str_to_timestamp,
)
from ...eikon._tools import tz_replacer

if TYPE_CHECKING:
    from .headlines._data import HeadlineRDP, HeadlineUDF


def news_build_df_udf(content_data, **kwargs) -> pd.DataFrame:
    """Build a headlines DataFrame from a UDF-shaped news payload.

    Reads the ``headlines`` list from ``content_data``, takes the
    ``text``/``storyId``/``sourceCode`` fields of each headline as columns,
    and indexes rows by the parsed ``versionCreated`` timestamp.  The
    ``text`` column is renamed to ``headline`` in the result.
    """
    columns = ["text", "storyId", "sourceCode"]
    raw_headlines = content_data.get("headlines", [])
    # Row index: each headline's versionCreated string parsed to a timestamp
    # (tz_replacer normalizes the timezone suffix first).
    index = [convert_str_to_timestamp(tz_replacer(raw_headline["versionCreated"])) for raw_headline in raw_headlines]
    data = [[raw_headline[column] for column in columns] for raw_headline in raw_headlines]
    if data:
        df = pd.DataFrame(
            data=data,
            index=index,
            columns=columns,
        )
        df = convert_dtypes(df)
    else:
        # Empty payload: keep the same column layout (index is also empty here).
        df = pd.DataFrame([], index, columns)
    df.index.name = "versionCreated"
    df.rename(columns={"text": "headline"}, inplace=True)
    return df


def news_build_df_rdp(raw: dict, **kwargs) -> pd.DataFrame:
    """Build a headlines DataFrame from an RDP-shaped news payload.

    ``raw`` is either one response dict or a list of page dicts, each with a
    ``data`` list of headline items.  Rows are indexed by the parsed
    ``versionCreated`` timestamp taken from each item's ``newsItem`` metadata.
    """
    columns = ["headline", "storyId", "sourceCode"]
    # A paginated response arrives as a list of pages; flatten their "data" lists.
    if isinstance(raw, list):
        content_data = []
        for i in raw:
            content_data.extend(i["data"])
    else:
        content_data = raw["data"]
    index = [
        convert_str_to_timestamp(tz_replacer(headline["newsItem"]["itemMeta"]["versionCreated"]["$"]))
        for headline in content_data
    ]
    data = []
    for headline_data in content_data:
        news_item = headline_data.get("newsItem", dict())
        item_meta = news_item.get("itemMeta", {})
        info_sources = news_item["contentMeta"]["infoSource"]
        # The source code is the qcode of the infoSource entry with role
        # "sRole:source"; None when no such entry exists.
        info_source = next(
            (item["_qcode"] for item in info_sources if item["_role"] == "sRole:source"),
            None,
        )
        data.append(
            [
                item_meta["title"][0]["$"],
                headline_data["storyId"],
                info_source,
            ]
        )
    if data:
        df = pd.DataFrame(
            data=data,
            index=index,
            columns=columns,
        )
        df = convert_dtypes(df)
    else:
        # NOTE(review): unlike news_build_df_udf, the empty case here leaves a
        # default RangeIndex (only its name is set below) — presumably
        # acceptable since the frame has no rows; confirm with callers.
        df = pd.DataFrame([], columns=columns)
    df.index.name = "versionCreated"
    return df


def _get_text_from_story(story: dict) -> str:
    """Return the story body text (``inlineData[0].$``), or None if absent."""
    news_item = story.get("newsItem", dict())
    content_set = news_item.get("contentSet", dict())
    inline_data = content_set.get("inlineData", [dict()])
    return inline_data[0].get("$")


def _get_headline_from_story(story: dict) -> str:
    """Return the story headline (``headline[0].$``), or None if absent."""
    news_item = story.get("newsItem", dict())
    content_meta = news_item.get("contentMeta", dict())
    headline = content_meta.get("headline", [dict()])
    return headline[0].get("$")


def get_headlines(
    raw: dict,
    build_headline: Callable[[dict], Union["HeadlineRDP", "HeadlineUDF"]],
    limit: int,
) -> List[Union["HeadlineRDP", "HeadlineUDF"]]:
    """Convert a raw news payload into at most ``limit`` headline objects.

    ``raw`` may be a single response dict or a list of page dicts; each page
    stores its items under ``data`` (RDP) or ``headlines`` (UDF).
    ``build_headline`` maps one raw item to a headline object.
    """
    headlines = []
    if isinstance(raw, list):
        data = []
        for i in raw:
            # Accept either payload shape: "data" (RDP) or "headlines" (UDF).
            data.extend(i.get("data", i.get("headlines", [])))
    else:
        data = raw.get("data", raw.get("headlines", []))
    for datum in data:
        headline = build_headline(datum)
        headlines.append(headline)
    # Truncate after collecting, so pages can over-fetch without leaking extras.
    headlines = headlines[:limit]
    return headlines
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/news/_tools.py
0.59561
0.25678
_tools.py
pypi
import base64
from os import makedirs
from os.path import join as join_path, exists
from typing import List, Union

from ...._tools import get_from_path, cached_property


class Image:
    """A news image decoded from a raw news-item payload.

    The raw payload is kept in ``self._data``; the binary image and the
    metadata properties are extracted lazily (and cached) from fixed paths
    inside the ``newsItem`` structure.
    """

    def __init__(self, data):
        # Raw payload dict as returned by the news images endpoint.
        self._data = data

    @cached_property
    def _image(self) -> bytes:
        # The image bytes arrive base64-encoded under newsItem/OtherContent/0/_.
        file_base_64 = get_from_path(self._data, "newsItem/OtherContent/0/_", delim="/")
        return base64.b64decode(file_base_64)

    def save(self, path: str = None):
        """Write the image to disk under its payload-provided filename.

        Parameters
        ----------
        path : str, optional
            Path to save file. Default is current working directory.
            The directory is created if it does not exist.
        """
        if path and not exists(path):
            makedirs(path)
        filename = self.filename
        if path:
            filename = join_path(path, filename)
        with open(filename, "wb+") as f:
            f.write(self._image)

    def show(self) -> "Union[Image, IPythonImage]":
        """Display the image in IPython if available, else return self.

        Returns an ``IPython.display.Image`` when IPython is importable,
        otherwise falls back to returning this object unchanged.
        """
        # Fix: annotation previously read "Union(Image, IPythonImage)" —
        # call syntax instead of subscription — which breaks typing
        # introspection (e.g. typing.get_type_hints).
        try:
            from IPython.display import Image as IPythonImage

            return IPythonImage(data=self._image)
        except ImportError:
            return self

    @property
    def size(self) -> int:
        """Size of the decoded image in bytes."""
        return len(self._image)

    @cached_property
    def filename(self) -> str:
        # Filename is carried in the headline text of the story properties.
        return get_from_path(
            self._data,
            "newsItem/StoryProps/0/ContentMeta/0/HeadlineText/0/_",
            delim="/",
        )

    @cached_property
    def provider(self) -> "List[str]":
        return get_from_path(
            self._data,
            "newsItem/StoryProps/0/ItemMeta/0/Provider",
            delim="/",
        )

    @cached_property
    def body_type(self) -> "List[str]":
        return get_from_path(
            self._data,
            "newsItem/StoryProps/0/ItemMeta/0/BodyType",
            delim="/",
        )

    @cached_property
    def source(self) -> "List[str]":
        return get_from_path(
            self._data,
            "newsItem/StoryProps/0/ItemMeta/0/Source",
            delim="/",
        )

    @cached_property
    def version_created(self) -> "List[str]":
        return get_from_path(
            self._data,
            "newsItem/StoryProps/0/ItemMeta/0/VersionCreated",
            delim="/",
        )

    @cached_property
    def first_created(self) -> "List[str]":
        return get_from_path(
            self._data,
            "newsItem/StoryProps/0/ItemMeta/0/FirstCreated",
            delim="/",
        )

    @cached_property
    def available_rsf(self) -> "List[str]":
        return get_from_path(
            self._data,
            "newsItem/StoryProps/0/ItemMeta/0/AvailableRSF",
            delim="/",
        )


class ResizedImage(Image):
    """An image already decoded server-side; ``_data`` holds plain values."""

    @cached_property
    def _image(self) -> bytes:
        # Resized payloads carry the decoded bytes directly.
        return self._data.get("image")

    @cached_property
    def filename(self) -> str:
        return self._data.get("filename")
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/news/images/_image.py
0.720467
0.157882
_image.py
pypi
import abc
from dataclasses import dataclass, fields
from typing import Optional, List

import pandas as pd
from humps import decamelize
from pandas.core.tools.datetimes import DatetimeScalar

from .. import Urgency
from .._tools import get_headlines
from ..._content_data import Data
from ...._tools import get_from_path
from ...._types import OptInt


@dataclass
class NewsHeadlinesData(Data):
    """Response data holder that lazily builds headline objects from ``raw``."""

    # Cache of built headlines; populated on first access of `headlines`.
    _headlines: Optional[List["HeadlineRDP"]] = None
    # Maximum number of headlines to build from the raw payload.
    _limit: "OptInt" = None

    @abc.abstractmethod
    def _build_headlines(self, raw: dict, limit: int) -> List["HeadlineRDP"]:
        # override this
        pass

    @property
    def headlines(self) -> List["HeadlineRDP"]:
        """Headline objects parsed from the raw payload (built once, cached)."""
        if self._headlines is None:
            self._headlines = self._build_headlines(self.raw, self._limit)
        return self._headlines


@dataclass
class NewsHeadlinesRDPData(NewsHeadlinesData):
    """Headlines data for the RDP payload shape."""

    def _build_headlines(self, raw: dict, limit: int) -> List["HeadlineRDP"]:
        return get_headlines(raw, headline_rdp_from_dict, limit)


@dataclass
class NewsHeadlinesUDFData(NewsHeadlinesData):
    """Headlines data for the UDF payload shape."""

    def _build_headlines(self, raw: dict, limit: int) -> List["HeadlineUDF"]:
        return get_headlines(raw, headline_udf_from_dict, limit)


@dataclass
class HeadlineRDP:
    """One news headline parsed from an RDP-format item."""

    title: str
    creator: str
    source: List[dict]
    language: List[dict]
    item_codes: List[str]
    urgency: Urgency
    first_created: "DatetimeScalar"
    version_created: "DatetimeScalar"
    story_id: str


def headline_rdp_from_dict(datum: dict) -> HeadlineRDP:
    """Build a HeadlineRDP from one raw RDP headline dict.

    Values are pulled from fixed dotted paths inside ``newsItem``; timestamps
    are parsed with ``pd.to_datetime`` and urgency is mapped to the
    ``Urgency`` enum.
    """
    subject = get_from_path(datum, "newsItem.contentMeta.subject")
    # Subject entries carry their code in "_qcode".
    item_codes = [item.get("_qcode") for item in subject]
    urgency = get_from_path(datum, "newsItem.contentMeta.urgency.$")
    urgency = Urgency(urgency)
    first_created = get_from_path(datum, "newsItem.itemMeta.firstCreated.$")
    first_created = pd.to_datetime(first_created)
    version_created = get_from_path(datum, "newsItem.itemMeta.versionCreated.$")
    version_created = pd.to_datetime(version_created)
    headline = HeadlineRDP(
        title=get_from_path(datum, "newsItem.itemMeta.title.0.$"),
        creator=get_from_path(datum, "newsItem.contentMeta.creator.0._qcode"),
        source=get_from_path(datum, "newsItem.contentMeta.infoSource"),
        language=get_from_path(datum, "newsItem.contentMeta.language"),
        item_codes=item_codes,
        urgency=urgency,
        first_created=first_created,
        version_created=version_created,
        story_id=datum["storyId"],
    )
    return headline


@dataclass
class HeadlineUDF:
    """One news headline parsed from a UDF-format item."""

    display_direction: str
    document_type: str
    first_created: "DatetimeScalar"
    is_alert: bool
    language: str
    report_code: str
    source_name: str
    story_id: str
    text: str
    version_created: "DatetimeScalar"


def headline_udf_from_dict(datum: dict) -> HeadlineUDF:
    """Build a HeadlineUDF from one raw UDF headline dict.

    Keys are converted from camelCase to snake_case, filtered to the
    dataclass's fields, and the two timestamp fields are parsed.
    """
    keys = [field.name for field in fields(HeadlineUDF)]
    # decamelize maps e.g. "displayDirection" -> "display_direction".
    kwargs = decamelize(datum)
    # Drop any payload keys that are not HeadlineUDF fields.
    kwargs = {k: v for k, v in kwargs.items() if k in keys}
    kwargs["first_created"] = pd.to_datetime(kwargs["first_created"])
    kwargs["version_created"] = pd.to_datetime(kwargs["version_created"])
    headline = HeadlineUDF(**kwargs)
    return headline
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/news/headlines/_data.py
0.81648
0.157428
_data.py
pypi
from typing import Callable, Optional, TYPE_CHECKING, Union

from .._news_data_provider_layer import NewsDataProviderLayer
from ..headlines._sort_order import SortOrder
from ...._content_type import ContentType
from ...._tools import create_repr

if TYPE_CHECKING:
    from ...._core.session import Session
    from ...._types import ExtendedParams, OptDateTime


class Definition(NewsDataProviderLayer):
    """
    This class describes parameters to retrieve data for news headlines.

    Parameters
    ----------
    query: str
        The user search query.
    count: int, optional
        Count to limit number of headlines. Min value is 0. Default: 10
    date_from: str or timedelta, optional
        Beginning of date range.
        String format is: '%Y-%m-%dT%H:%M:%S'. e.g. '2016-01-20T15:04:05'.
    date_to: str or timedelta, optional
        End of date range.
        String format is: '%Y-%m-%dT%H:%M:%S'. e.g. '2016-01-20T15:04:05'.
    sort_order: str or SortOrder
        Sort order for the response. Default: SortOrder.new_to_old
    extended_params: dict, optional
        Additional parameters to provide to the API.

    Examples
    --------
    >>> from datetime import timedelta
    >>> from refinitiv.data.content import news
    >>> definition = news.headlines.Definition(
    ...     "Refinitiv",
    ...     date_from="20.03.2021",
    ...     date_to=timedelta(days=-4),
    ...     count=3
    ... )
    """

    def __init__(
        self,
        query: str,
        count: int = 10,
        date_from: "OptDateTime" = None,
        date_to: "OptDateTime" = None,
        sort_order: Union[str, SortOrder] = SortOrder.new_to_old,
        extended_params: "ExtendedParams" = None,
    ):
        super().__init__(
            data_type=ContentType.NEWS_HEADLINES_RDP,
            query=query,
            count=count,
            date_from=date_from,
            date_to=date_to,
            sort_order=sort_order,
            extended_params=extended_params,
        )
        # Kept separately only for __repr__.
        self._query = query

    def get_data(
        self,
        session: Optional["Session"] = None,
        on_response: Optional[Callable] = None,
        on_page_response: Optional[Callable] = None,
    ):
        """
        Returns a response from the API to the library

        Parameters
        ----------
        session : Session, optional
            The Session defines the source where you want to retrieve your data
        on_response : Callable, optional
            Callable object to process retrieved data
        on_page_response : Callable, optional
            Callable object to process retrieved data

        Returns
        -------
        NewsHeadlinesResponse

        Raises
        ------
        AttributeError
            If user didn't set default session.

        Examples
        --------
        >>> from datetime import timedelta
        >>> from refinitiv.data.content import news
        >>> definition = news.headlines.Definition(
        ...     query="Refinitiv",
        ...     date_from="20.03.2021",
        ...     date_to=timedelta(days=-4),
        ...     count=3
        ... )
        >>> response = definition.get_data()
        """
        # The page callback travels to the provider via the kwargs captured
        # by the base layer at construction time.
        self._kwargs["on_page_response"] = on_page_response
        return super().get_data(session, on_response)

    async def get_data_async(
        self,
        session: Optional["Session"] = None,
        on_response: Optional[Callable] = None,
        on_page_response: Optional[Callable] = None,
        closure: Optional[str] = None,
    ):
        """
        Returns a response asynchronously from the API to the library

        Parameters
        ----------
        session : Session, optional
            The Session defines the source where you want to retrieve your data
        on_response : Callable, optional
            Callable object to process retrieved data
        on_page_response : Callable, optional
            Callable object to process retrieved data
        closure : str, optional
            Specifies the parameter that will be merged with the request

        Returns
        -------
        NewsHeadlinesResponse

        Raises
        ------
        AttributeError
            If user didn't set default session.

        Examples
        --------
        >>> from datetime import timedelta
        >>> from refinitiv.data.content import news
        >>> definition = news.headlines.Definition(
        ...     query="Refinitiv",
        ...     date_from="20.03.2021",
        ...     date_to=timedelta(days=-4),
        ...     count=3
        ... )
        >>> response = await definition.get_data_async()
        """
        # Same kwargs hand-off as in get_data.
        self._kwargs["on_page_response"] = on_page_response
        return await super().get_data_async(session, on_response, closure)

    def __repr__(self):
        return create_repr(
            self,
            content=f"{{query='{self._query}'}}",
        )
/refinitiv-data-1.3.1.tar.gz/refinitiv-data-1.3.1/refinitiv/data/content/news/headlines/_definition.py
0.944087
0.312475
_definition.py
pypi
import logging
import os
import sys
from datetime import datetime
from functools import lru_cache
from logging.handlers import RotatingFileHandler, TimedRotatingFileHandler
from typing import Tuple, Any

from refinitiv.dataplatform.tools._specification import BaseSpecification

# ---------------------------------------------------------------------------
# Conversion from TS/JS to Python
# ---------------------------------------------------------------------------

# Custom level below DEBUG(10), mirroring the JS library's "trace" level.
TRACE = 5

py_grade = logging._nameToLevel.copy()
py_grade["TRACE"] = TRACE  # add an additional level
logging.addLevelName(TRACE, "TRACE")
py_grade = {
    f"py_{lname}": {"name": lname, "level": llevel}
    for lname, llevel in py_grade.items()
}

# Level names/ranks used by the TS/JS sibling library's config files.
ts_grade = {
    "ts_trace": {"name": "trace", "level": 0},
    "ts_debug": {"name": "debug", "level": 1},
    "ts_info": {"name": "info", "level": 2},
    "ts_warn": {"name": "warn", "level": 3},
    "ts_error": {"name": "error", "level": 4},
    "ts_silent": {"name": "silent", "level": 5},
}

conversion_schema = [
    ("ts_trace", "py_TRACE"),
    ("ts_debug", "py_DEBUG"),
    ("ts_info", "py_INFO"),
    ("ts_warn", "py_WARNING"),
    ("ts_error", "py_ERROR"),
    ("ts_silent", "py_CRITICAL"),
]

# TS level name -> Python level name, e.g. "warn" -> "WARNING".
py_by_ts_nameToName = {
    ts_grade[ts_]["name"]: py_grade[py_]["name"] for ts_, py_ in conversion_schema
}
# TS numeric rank -> Python numeric level, e.g. 3 -> 30.
py_by_ts_levelToLevel = {
    ts_grade[ts_]["level"]: py_grade[py_]["level"] for ts_, py_ in conversion_schema
}

# ---------------------------------------------------------------------------
# File handler
# ---------------------------------------------------------------------------

bytes_by_suffix = {
    "B": 1,  # B
    "K": 2 ** 10,  # KiB
    "M": 2 ** 20,  # MiB
    "G": 2 ** 30,  # GiB
}


def convert_filesize(s) -> int:
    """Convert a size like '10M' (or a plain int) into a byte count.

    Raises
    ------
    ValueError
        If *s* is neither an int nor a string.
    """
    if isinstance(s, int):
        return s
    if isinstance(s, str):
        suffix_ = s[-1]
        count_ = int(s[:-1])
        bytes_ = bytes_by_suffix[suffix_]
        return count_ * bytes_
    # BUG FIX: the original silently returned None here, which later crashed
    # deep inside RotatingFileHandler; fail fast with a clear message instead.
    raise ValueError(f"Cannot convert file size from value of type {type(s)}")


def convert_interval(s) -> Tuple[int, str]:
    """Convert a rotation interval like '12h' into (count, when-code).

    'M' (months) is approximated as 30 days, because TimedRotatingFileHandler
    has no month unit.
    """
    when_ = s[-1]
    interval_ = int(s[:-1])

    # Months
    if when_ == "M":
        when_ = "D"
        interval_ = interval_ * 30

    return interval_, when_


class TimedSizedRotatingHandler(TimedRotatingFileHandler, RotatingFileHandler):
    """File handler that rolls over on EITHER elapsed time or file size."""

    def __init__(
        self,
        filename,
        file_mode="a",
        max_bytes=0,
        backup_count=0,
        encoding="ascii",
        delay=False,
        when="h",
        interval=1,
        utc=False,
        at_time=None,
        *args,
        **kwargs,
    ):
        # "w" mode: start from an empty file; removal is best effort.
        if file_mode.startswith("w"):
            try:
                os.remove(filename)
            except Exception:
                pass

        self.filename = filename

        # Initialize both bases explicitly; each sets up its own rollover state.
        RotatingFileHandler.__init__(
            self,
            filename=filename,
            mode=file_mode,
            maxBytes=max_bytes,
            backupCount=backup_count,
            encoding=encoding,
            delay=delay,
        )
        TimedRotatingFileHandler.__init__(
            self,
            filename=filename,
            when=when,
            interval=interval,
            backupCount=backup_count,
            encoding=encoding,
            delay=delay,
            utc=utc,
            atTime=at_time,
        )

        # Keep the path as given (the base classes abspath-ify baseFilename).
        if os.path.sep in filename:
            self.baseFilename = filename

    def shouldRollover(self, record):
        # Roll over if either the timed or the sized condition triggers.
        timed_rollover = TimedRotatingFileHandler.shouldRollover(self, record)
        sized_rollover = RotatingFileHandler.shouldRollover(self, record)
        return timed_rollover or sized_rollover

    def doRollover(self):
        # NOTE(review): super(TimedRotatingFileHandler, self) deliberately skips
        # TimedRotatingFileHandler in the MRO, so this dispatches to
        # RotatingFileHandler.doRollover (size-style ".1", ".2" backups) —
        # confirm this is the intended rollover naming.
        super(TimedRotatingFileHandler, self).doRollover()

    def getFilesToDelete(self):
        # NOTE(review): same MRO skip as doRollover; RotatingFileHandler does
        # not define getFilesToDelete, so this resolves further up the MRO —
        # verify it is ever called / does what is expected.
        return super(TimedRotatingFileHandler, self).getFilesToDelete()


def _filenamer(base_filename):
    """Rename rotated files from '<date>-<time>-<pid>-<name>.<count>'
    to '<date>-<time>-<count>-<pid>-<name>' (count moved before the pid)."""
    basename_ = os.path.basename(base_filename)
    date_, time_, pid_, *name_, name_with_count_ = basename_.split("-")
    *name_chunk_, count_ = name_with_count_.split(".")
    name_.append(".".join(name_chunk_))
    name_ = "-".join(name_)
    new_basename_ = "-".join([date_, time_, count_, pid_, name_])
    return base_filename.replace(basename_, new_basename_)


_file_handler_formatter = logging.Formatter(
    "[%(asctime)s] - "
    "[%(name)s] - "
    "[%(levelname)s] - "
    "[%(thread)d - %(threadName)s] - "
    "[%(module)s] - "
    "[%(funcName)s] - "
    "%(message)s"
)


def _get_filename(filename_: str, datetime_: datetime, pid_: int) -> str:
    """Prefix the configured log file name with '<date>-<time>-<pid>-',
    preserving any directory components."""
    date_ = datetime_.strftime("%Y%m%d")
    time_ = datetime_.strftime("%H%M")
    filename_ = filename_.replace("\\", os.path.sep)
    filename_ = os.path.normpath(filename_)
    *path, filename = filename_.split(os.path.sep)
    if path:
        # BUG FIX: the shipped source contained a leaked "(unknown)" token here
        # instead of interpolating the basename split off above.
        new_filename = f"{date_}-{time_}-{pid_}-{filename}"
        path.append(new_filename)
        filename_ = f"{os.path.sep}".join(path)
    else:
        filename_ = f"{date_}-{time_}-{pid_}-{filename}"
    return filename_


def _create_log_file_handler():
    """Build the rotating file handler from the library configuration."""
    from refinitiv.dataplatform import configure

    # file name
    name_ = configure.get_str(configure.keys.log_filename)
    filename_ = _get_filename(name_, datetime.now(), os.getpid())

    # file size
    file_size_ = configure.get_str(configure.keys.log_file_size)
    file_size_ = convert_filesize(file_size_)

    # max files count
    max_files_ = configure.get_int(configure.keys.log_max_files)

    # interval
    interval_ = configure.get_str(configure.keys.log_interval)
    interval_, when_ = convert_interval(interval_)

    handler_ = TimedSizedRotatingHandler(
        filename_,
        max_bytes=file_size_,
        when=when_,
        interval=interval_,
        backup_count=max_files_,
        encoding="utf-8",
        delay=True,
    )
    handler_.namer = _filenamer
    handler_.setFormatter(_file_handler_formatter)
    return handler_


# ---------------------------------------------------------------------------
# Stdout handler
# ---------------------------------------------------------------------------

_stdout_formatter = logging.Formatter(
    "[%(asctime)s] - "
    "[%(levelname)s] - "
    "[%(name)s] - "
    "[%(thread)d] | "
    "%(threadName)s\n"
    "%(message)s"
)


def _create_log_stdout_handler():
    handler_ = logging.StreamHandler(sys.stdout)
    handler_.setFormatter(_stdout_formatter)
    return handler_


# ---------------------------------------------------------------------------
# Filtering
# ---------------------------------------------------------------------------


class NotLog(BaseSpecification):
    """Rejects every record."""

    def is_satisfied_by(self, record: Any) -> bool:
        return False


class LogEverything(BaseSpecification):
    """Accepts every record."""

    def is_satisfied_by(self, record: Any) -> bool:
        return True


class NotLogWithName(BaseSpecification):
    """Rejects records whose logger name equals *name* exactly."""

    def __init__(self, name) -> None:
        super().__init__()
        self.name = name

    def is_satisfied_by(self, record: Any) -> bool:
        return self.name != record.name


class LogWithName(BaseSpecification):
    """Accepts records whose logger name equals *name* exactly."""

    def __init__(self, name) -> None:
        super().__init__()
        self.name = name

    def is_satisfied_by(self, record: Any) -> bool:
        return self.name == record.name


class LogStartsWithName(BaseSpecification):
    """Accepts records whose logger name starts with *name*."""

    def __init__(self, name) -> None:
        super().__init__()
        self.name = name

    def is_satisfied_by(self, record: Any) -> bool:
        return record.name.startswith(self.name)


class NotLogStartsWithName(BaseSpecification):
    """Rejects records whose logger name starts with *name*."""

    def __init__(self, name) -> None:
        super().__init__()
        self.name = name

    def is_satisfied_by(self, record: Any) -> bool:
        return not record.name.startswith(self.name)


default_log_filter = "*"


def join_by_and_(prev_spec, spec):
    # AND-combine, or start the chain if there is no previous spec yet.
    return prev_spec and prev_spec.and_(spec) or spec


def join_by_or_(prev_spec, spec):
    # OR-combine, or start the chain if there is no previous spec yet.
    return prev_spec and prev_spec.or_(spec) or spec


def make_filter(text):
    """Build a logging filter function from a comma-separated pattern string.

    Supported tokens: '*' (everything), 'name', 'prefix*', '-name', '-prefix*'.
    Negative tokens are AND-ed, positive tokens are OR-ed into the chain.
    """
    ss = [s.strip() for s in text.split(",") if s]

    if not ss:
        can_log = NotLog()
    else:
        can_log = None
        for s in ss:
            if s == "*":
                can_log = join_by_or_(can_log, LogEverything())
            elif s.startswith("-") and s.endswith("*"):
                can_log = join_by_and_(can_log, NotLogStartsWithName(s[1:-1]))
            elif s.startswith("-"):
                can_log = join_by_and_(can_log, NotLogWithName(s[1:]))
            elif s.endswith("*"):
                can_log = join_by_or_(can_log, LogStartsWithName(s[:-1]))
            else:
                can_log = join_by_or_(can_log, LogWithName(s))

    def inner(record):
        return can_log.is_satisfied_by(record)

    return inner


# ---------------------------------------------------------------------------
# Log level
# ---------------------------------------------------------------------------


def convert_log_level(level) -> int:
    """Convert a TS-style name ('warn'), a Python name ('WARNING') or an int
    into a Python logging level; defaults to INFO when unrecognized."""
    py_level_ = None
    if isinstance(level, str):
        level_ = level.strip()
        py_level_ = py_by_ts_nameToName.get(level_)
        if py_level_ is None:
            py_level_ = level
        py_level_ = logging._nameToLevel.get(py_level_)
    elif isinstance(level, int):
        py_level_ = level
    return py_level_ or logging.INFO


def read_log_level_config():
    from refinitiv.dataplatform import configure

    level_ = configure.get_str(configure.keys.log_level)
    return convert_log_level(level_)


# ---------------------------------------------------------------------------
# Create and dispose logger
# ---------------------------------------------------------------------------

_log_file_handler = _create_log_file_handler()
_log_stream_handler = _create_log_stdout_handler()

_existing_loggers = []


@lru_cache(None)
def create_logger(name):
    """Create (once per name, via lru_cache) a configured logger."""
    from refinitiv.dataplatform import configure

    # construct the logger object for session
    logger_ = logging.getLogger(name)

    log_file_enabled_ = configure.get(configure.keys.log_file_enabled, True)
    if log_file_enabled_:
        logger_.addHandler(_log_file_handler)

    log_console_enabled_ = configure.get(configure.keys.log_console_enabled, False)
    if log_console_enabled_:
        logger_.addHandler(_log_stream_handler)
    else:
        # No console handler: also stop propagation to the root logger.
        logger_.propagate = False

    log_level_ = read_log_level_config()
    if log_level_ != logger_.level:
        logger_.setLevel(log_level_)

    log_filter_ = configure.get(configure.keys.log_filter, default_log_filter)
    logger_.addFilter(make_filter(log_filter_))

    _existing_loggers.append(name)
    return logger_


def get_logger(name):
    return create_logger(name)


def set_log_level(logger, level):
    if isinstance(logger, str):
        logger = get_logger(logger)
    logger.setLevel(level)
    return logger


def existing_loggers():
    return _existing_loggers


def dispose_logger(logger):
    """Close and detach all handlers of *logger* (accepts a name or a logger)."""
    if isinstance(logger, str):
        logger = get_logger(logger)

    handlers_ = logger.handlers[:]
    for hdlr_ in handlers_:
        hdlr_.close()
        logger.removeHandler(hdlr_)
    return logger


# ---------------------------------------------------------------------------
# Root logger
# ---------------------------------------------------------------------------

root_logger = create_logger("rdp")
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/log.py
0.479016
0.150871
log.py
pypi
# __all__ = ['OMMItemStreamCallback'] class OMMItemStreamCallback(object): """ All callbacks for item stream. Raises ------ Exception If request fails or if Refinitiv Services return an error """ def __init__(self): self._on_refresh_cb = None self._on_update_cb = None self._on_error_cb = None self._on_status_cb = None self._on_complete_cb = None @property def on_refresh(self): """Called when the stream is opened or when the record is refreshed with a new image. This callback receives a full image Default: None Returns ------- callable object """ return self._on_refresh_cb @on_refresh.setter def on_refresh(self, on_refresh_cb): self._on_refresh_cb = on_refresh_cb @property def on_update(self): """Called when an update is received. This callback receives an utf-8 string as argument. Default: None Returns ------- callable object """ return self._on_update_cb @on_update.setter def on_update(self, on_update_cb): self._on_update_cb = on_update_cb @property def on_error(self): """Called when an error occurs. This callback receives Exception as argument Default: None Returns ------- callable object """ return self._on_error_cb @on_error.setter def on_error(self, on_error_cb): self._on_error_cb = on_error_cb @property def on_status(self): """Called when subscription status changed. This callback receives an status as argument. Default: None Returns ------- callable object """ return self._on_status_cb @on_status.setter def on_status(self, on_status_cb): self._on_status_cb = on_status_cb @property def on_complete(self): """Called when the stream received all expected data. Default: None Returns ------- callable object """ return self._on_complete_cb @on_complete.setter def on_complete(self, on_complete_cb): self._on_complete_cb = on_complete_cb
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/delivery/stream/omm_item_stream_callback.py
0.775945
0.157202
omm_item_stream_callback.py
pypi
__all__ = ["get_data", "TR_Field"]

import pandas as pd
import numpy

import refinitiv.dataplatform.legacy.json_requests
from .tools import (
    get_json_value,
    is_string_type,
    check_for_string_or_list_of_strings,
    build_dictionary,
    build_list,
    DefaultSession,
)

DataGrid_UDF_endpoint = "DataGrid"
DataGridAsync_UDF_endpoint = "DataGrid_StandardAsync"


def TR_Field(field_name, params=None, sort_dir=None, sort_priority=None):
    """
    Helper to build one field definition for get_data.

    Parameters
    ----------
    field_name : str
        Field name to request. You can find the list in Data Item Browser.
    params : dict, optional
        Parameters for the field passed in field_name.
    sort_dir : str, optional
        Sort direction: 'asc' or 'desc'. Default: 'asc'.
    sort_priority : int, optional
        Sorting priority for the field; the highest priority is 0.

    Returns
    -------
    dict
        A dictionary that can be passed directly to get_data.

    Examples
    --------
    >>> TR_Field('tr.revenue')
    >>> TR_Field('tr.open', None, 'asc', 1)
    >>> TR_Field('TR.GrossProfit', {'Scale': 6, 'Curn': 'EUR'}, 'asc', 0)
    """
    logger = DefaultSession.get_default_session().logger()
    if params and type(params) != dict:
        logger.error("TR_Field error: The argument params must be a dictionary")
        raise ValueError("TR_Field error: The argument params must be a dictionary")
    if type(params) == dict and not bool(params):
        error_msg = "TR_Field error: The argument params must be a non empty dictionary or set to None (default value if not set)"
        logger.error(error_msg)
        raise ValueError(error_msg)

    field = {field_name: {}}
    if params:
        field[field_name]["params"] = params

    if sort_dir:
        if is_string_type(sort_dir) and sort_dir in ["asc", "desc"]:
            field[field_name]["sort_dir"] = sort_dir
        else:
            error_msg = 'TR_Field error: The argument sort_dir must be a string ("asc" or "desc")'
            logger.error(error_msg)
            raise ValueError(error_msg)

    if sort_priority:
        if type(sort_priority) is not int:
            error_msg = "TR_Field error: The argument sort_priority must be a integer"
            logger.error(error_msg)
            raise ValueError(error_msg)
        field[field_name]["sort_priority"] = sort_priority
    return field


def get_data(
    instruments,
    fields,
    parameters=None,
    field_name=False,
    raw_output=False,
    debug=False,
    raw_response=False,
):
    """
    Returns a pandas.DataFrame with fields in columns and instruments as row index.

    Parameters
    ----------
    instruments : str or list of str
        Single instrument or list of instruments to request.
    fields : str, dict, or list of str/dict
        Fields to request; use TR_Field to build entries with params/sorting.
    parameters : str or dict, optional
        Single global parameter key=value or dictionary of global parameters.
    field_name : bool, optional
        If True, column headers use field names; otherwise display names.
    raw_output : bool, optional
        If True, return the raw JSON payload instead of a DataFrame.
    debug : bool, optional
        If True, the json request and response are printed.
    raw_response : bool, optional
        If True, return the raw HTTP response object.

    Returns
    -------
    (pandas.DataFrame, list)
        DataFrame of values plus a list of per-cell errors.

    Raises
    ------
    Exception
        If http request fails or if server returns an error.
    ValueError
        If a parameter type or value is wrong.
    """
    logger = DefaultSession.get_default_session().logger()

    check_for_string_or_list_of_strings(instruments, "instruments")
    instruments = build_list(instruments, "instruments")
    # The backend expects upper-case RICs; leave mixed-case names untouched.
    instruments = [value.upper() if value.islower() else value for value in instruments]

    if parameters:
        parameters = build_dictionary(parameters, "parameters")

    fields = parse_fields(fields)
    fields_for_request = []
    for f in fields:
        keys = list(f.keys())
        if len(keys) != 1:
            # BUG FIX: the original used `with "<message>" as msg:` — a str is
            # not a context manager, so this path raised TypeError instead of
            # the intended ValueError.
            error_msg = "get_data error: The field dictionary should contain a single key which is the field name"
            logger.error(error_msg)
            raise ValueError(error_msg)
        name = keys[0]
        field_info = f[name]
        if type(field_info) != dict:
            # BUG FIX: same `with str as msg` defect as above.
            error_msg = "get_data error: The parameters for the file {} should be passed in a dict".format(
                name
            )
            logger.error(error_msg)
            raise ValueError(error_msg)

        field = {"name": name}
        if "sort_dir" in list(field_info.keys()):
            field["sort"] = field_info["sort_dir"]
        if "sort_priority" in list(field_info.keys()):
            field["sortPriority"] = field_info["sort_priority"]
        if "params" in list(field_info.keys()):
            field["parameters"] = field_info["params"]
        fields_for_request.append(field)

    payload = {"instruments": instruments, "fields": fields_for_request}
    if parameters:
        payload.update({"parameters": parameters})

    # Async endpoint is the default; kept as a variable so it is easy to
    # switch back to the synchronous DataGrid endpoint.
    _endpoint = DataGridAsync_UDF_endpoint
    if _endpoint == DataGridAsync_UDF_endpoint:
        payload = {"requests": [payload]}

    response = refinitiv.dataplatform.legacy.json_requests.send_json_request(
        _endpoint, payload, debug=debug, raw_response=raw_response
    )

    if raw_response:
        return response

    result = response.json()
    # The async endpoint wraps results in a "responses" list.
    if result.get("responses"):
        result = result["responses"][0]

    if raw_output:
        return result

    return get_data_frame(result, field_name)


def parse_fields(fields):
    """Normalize the user-supplied fields argument into a list of dicts."""
    if is_string_type(fields):
        return [{fields: {}}]

    logger = DefaultSession.get_default_session().logger()
    if type(fields) == dict:
        if len(fields) == 0:
            # BUG FIX: `with str as msg` defect (see get_data).
            error_msg = "get_data error: fields list must not be empty"
            logger.error(error_msg)
            raise ValueError(error_msg)
        return [fields]

    field_list = []
    if type(fields) == list:
        if len(fields) == 0:
            # BUG FIX: `with str as msg` defect (see get_data).
            error_msg = "get_data error: fields list must not be empty"
            logger.error(error_msg)
            raise ValueError(error_msg)
        for f in fields:
            if is_string_type(f):
                field_list.append({f: {}})
            elif type(f) == dict:
                field_list.append(f)
            else:
                error_msg = (
                    "get_data error: the fields should be of type string or dictionary"
                )
                DefaultSession.get_default_session().logger().error(error_msg)
                raise ValueError(error_msg)
        return field_list

    error_msg = "get_data error: the field parameter should be a string, a dictionary , or a list of strings|dictionaries"
    DefaultSession.get_default_session().logger().error(error_msg)
    raise ValueError(error_msg)


def get_data_value(value):
    """Unwrap a response cell: plain scalars pass through, dict cells yield
    their 'value' entry."""
    if is_string_type(value):
        return value
    elif isinstance(value, dict):
        # BUG FIX: the original tested `value is dict` (identity with the type
        # object, always False), so wrapped cells were never unpacked.
        return value["value"]
    else:
        return value


def get_data_frame(data_dict, field_name=False):
    """Build (DataFrame, errors) from a DataGrid JSON response."""
    if field_name:
        headers = [
            header.get("field", header.get("displayName"))
            for header in data_dict["headers"][0]
        ]
    else:
        headers = [header["displayName"] for header in data_dict["headers"][0]]
    data = numpy.array(
        [[get_data_value(value) for value in row] for row in data_dict["data"]]
    )
    if len(data):
        df = pd.DataFrame(data, columns=headers)
        # NOTE(review): errors="ignore" is deprecated in recent pandas —
        # behavior kept for compatibility.
        df = df.apply(pd.to_numeric, errors="ignore")
        if not df.empty:
            df = df.convert_dtypes()
    else:
        df = pd.DataFrame([], columns=headers)
    errors = get_json_value(data_dict, "error")
    return df, errors
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/legacy/data_grid.py
0.723114
0.283834
data_grid.py
pypi
import json from datetime import date, datetime, timedelta from typing import Union, Tuple import dateutil.parser from dateutil import tz __all__ = [ "get_default_session", "set_default_session", "close_session", "set_app_key", "set_log_level", ] def is_string_type(value): try: return isinstance(value, basestring) except NameError: return isinstance(value, str) def get_json_value(json_data, name): if name in json_data: return json_data[name] else: return None def to_datetime( date_value: Union[str, timedelta, Tuple[datetime, date]] ) -> Union[tuple, datetime, None]: if date_value is None: return None if isinstance(date_value, timedelta): return datetime.now(tz.tzlocal()) + date_value if isinstance(date_value, (datetime, date)): return date_value try: return dateutil.parser.parse(date_value) except ValueError as e: raise e except Exception as e: raise ValueError(e) def _to_utc(datetime_value): if datetime_value is None: return None _value = to_datetime(datetime_value) UTC = tz.gettz("UTC") _value = _value.astimezone(UTC).replace(tzinfo=None) return _value def to_utc_datetime(datetime_value): datetime_value = _to_utc(datetime_value) if datetime_value is None: return None return datetime_value # .strftime("%Y-%m-%d %H:%M:%S") def to_utc_date(date_value): date_value = _to_utc(date_value) if date_value is None: return None return date_value.date() def to_utc_datetime_isofmt(datetime_value): datetime_value = _to_utc(datetime_value) if datetime_value is None: return None datetime_value = datetime_value.isoformat(timespec="microseconds") + "000Z" return datetime_value def get_date_from_today(days_count): if type(days_count) != int: raise ValueError( "The parameter {} should be an integer, found {}".format( days_count, type(days_count) ) ) return datetime.now(tz.tzlocal()) + timedelta(days=-days_count) def is_list_of_string(values): return all(is_string_type(value) for value in values) def check_for_string(parameter, name): if not is_string_type(parameter): raise 
ValueError( "The parameter {} should be a string, found {}".format(name, str(parameter)) ) def check_for_string_or_list_of_strings(parameter, name): if type(parameter) != list and (not parameter or not is_string_type(parameter)): raise ValueError( "The parameter {} should be a string or a list of string, found {}".format( name, type(parameter) ) ) if type(parameter) == list and not is_list_of_string(parameter): raise ValueError( "All items in the parameter {} should be of data type string, found {}".format( name, [type(v) for v in parameter] ) ) def check_for_int(parameter, name): if type(parameter) is not int: raise ValueError( "The parameter {} should be an int, found {} type value ({})".format( name, type(parameter), str(parameter) ) ) def build_list_with_params(values, name): if values is None: raise ValueError(name + " is None, it must be a string or a list of strings") if is_string_type(values): return [(v, None) for v in values.split()] elif type(values) is list: try: return [ (value, None) if is_string_type(value) else (value[0], value[1]) for value in values ] except Exception: raise ValueError( name + " must be a string or a list of strings or a tuple or a list of tuple" ) else: try: return values[0], values[1] except Exception: raise ValueError( name + " must be a string or a list of strings or a tuple or a list of tuple" ) def build_list(values, name): if values is None: raise ValueError(name + " is None, it must be a string or a list of strings") if is_string_type(values): return [values.strip()] elif type(values) is list: if all(is_string_type(value) for value in values): return [value for value in values] else: raise ValueError(name + " must be a string or a list of strings") else: raise ValueError(name + " must be a string or a list of strings") def build_dictionary(dic, name): if dic is None: raise ValueError( name + " is None, it must be a string or a dictionary of strings" ) if is_string_type(dic): return json.loads(dic) elif type(dic) is dict: 
return dic else: raise ValueError(name + " must be a string or a dictionary") def tz_replacer(s): if isinstance(s, str): if s.endswith("Z"): s = s[:-1] elif s.endswith("-0000"): s = s[:-5] if s.endswith(".000"): s = s[:-4] return s def set_default_session(session): DefaultSession.set_default_session(session) def get_default_session(app_key=None): return DefaultSession.get_default_session(app_key) def close_session(): DefaultSession.get_default_session().close() def set_app_key(app_key): from refinitiv.dataplatform.core.session.session import Session _session = get_default_session(app_key) if _session.get_open_state() == Session.State.Closed: _session.open() def set_log_level(log_level): from refinitiv.dataplatform.core.session.session import Session default_session = DefaultSession.get_default_session() default_session.set_log_level(log_level) class DefaultSession(object): # singleton session __default_session = None @classmethod def set_default_session(cls, session): from refinitiv.dataplatform.core.session.session import Session if isinstance(session, Session): cls.__default_session = session @classmethod def get_default_session(cls, app_key=None): from refinitiv.dataplatform.core.session.desktop_session import DesktopSession if cls.__default_session is None: if app_key is None: return None cls.__default_session = DesktopSession(app_key) elif app_key is not None: if app_key != cls.__default_session.app_key: cls.__default_session.close() cls.__default_session = DesktopSession(app_key) return cls.__default_session @classmethod def close_default_session(cls): if cls.__default_session is not None: cls.__default_session.close()
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/legacy/tools.py
0.607663
0.228802
tools.py
pypi
__all__ = ["get_news_headlines", "get_news_story"]

import refinitiv.dataplatform.legacy.json_requests, refinitiv.dataplatform.core.session
import json

import pandas as pd
import numpy

from .tools import is_string_type, to_datetime, tz_replacer, DefaultSession

News_Headlines_UDF_endpoint = "News_Headlines"
News_Story_UDF_endpoint = "News_Story"


def get_news_headlines(
    query=None, count=10, date_from=None, date_to=None, raw_output=False, debug=False
):
    """
    Returns a list of news headlines

    Parameters
    ----------
    query: string, optional
        News headlines search criteria.
        The text can contain RIC codes, company names, country names and
        operators (AND, OR, NOT, IN, parentheses and quotes for explicit
        search...).
        Tip: Append 'R:' in front of RIC names to improve performance.
        Default: Top News written in English

    count: int, optional
        Max number of headlines retrieved.
        Value Range: [1-100].
        Default: 10

    date_from: string or datetime, optional
        Beginning of date range.
        String format is: '%Y-%m-%dT%H:%M:%S'. e.g. '2016-01-20T15:04:05'.

    date_to: string or datetime, optional
        End of date range.
        String format is: '%Y-%m-%dT%H:%M:%S'. e.g. '2016-01-20T15:04:05'.

    raw_output: boolean, optional
        Set this parameter to True to get the data in json format
        if set to False, the legacy will return a data frame
        Default: False

    debug: boolean, optional
        When set to True, the json request and response are printed.
        Default: False

    Returns
    -------
    pandas.DataFrame
        Returns a DataFrame of news headlines with the following columns:

        - Index               : Timestamp of the publication time
        - version_created     : Date of the latest update on the news
        - text                : Text of the Headline
        - story_id            : Identifier to be used to retrieve the full
          story using the get_news_story legacy
        - source_code         : Second news identifier

    Raises
    ------
    Exception
        If http request fails or if server returns an error
    AttributeError
        If a parameter type is wrong

    Examples
    --------
    >>> import refinitiv.dataplatform as ek
    >>> ek.set_app_key('set your app id here')
    >>> headlines = ek.get_news_headlines("R:MSFT.O", 2)
    >>> headlines = ek.get_news_headlines("R:MSFT.O IN FRANCE")
    >>> headlines = ek.get_news_headlines("R:MSFT.O IN FRANCE IN ENGLISH", count=5)
    >>> headlines = ek.get_news_headlines("OBA* OR CLINTON IN ENGLISH", count=5)
    """
    logger = DefaultSession.get_default_session().logger()
    if query is None:
        # Default query: all topics, English language.
        query = "Topic:TOPALL and Language:LEN"

    # check parameters type and values
    if not is_string_type(query):
        error_msg = "query must be a string"
        logger.error(error_msg)
        raise ValueError(error_msg)

    # query string must be formated as a "" string containing '' substrings
    # (and not a '' string containing "" substrings)
    query = query.replace('"', "'")

    # validate query JSON format
    test_query = '{"query":"' + query + '"}'
    try:
        json.loads(str(test_query))
    except ValueError as error:
        error_msg = "query {} has invalid format. {}".format(test_query, str(error))
        logger.debug(error_msg)
        raise ValueError(error_msg)

    if type(count) is not int:
        error_msg = "count must be an integer"
        logger.error(error_msg)
        raise ValueError(error_msg)
    elif count < 0:
        error_msg = "count must be equal or greater than 0"
        logger.error(error_msg)
        raise ValueError(error_msg)

    # build the payload
    app_key = DefaultSession.get_default_session().app_key
    payload = {
        "number": str(count),
        "query": query,
        "productName": app_key,
        "attributionCode": "",
    }

    if date_from is not None:
        payload.update({"dateFrom": to_datetime(date_from).isoformat()})

    if date_to is not None:
        payload.update({"dateTo": to_datetime(date_to).isoformat()})

    response = refinitiv.dataplatform.legacy.json_requests.send_json_request(
        News_Headlines_UDF_endpoint, payload, debug=debug
    )
    result = response.json()

    if raw_output:
        return result
    else:
        return get_data_frame(result)


def get_data_frame(json_data):
    """Build the headlines DataFrame (indexed by firstCreated timestamps)
    from the raw News_Headlines JSON response."""
    # Columns exposed to the caller; the response carries more fields
    # (see Headline_All_Fields_ below) which are intentionally dropped.
    Headline_Selected_Fields = ["versionCreated", "text", "storyId", "sourceCode"]
    Headline_All_Fields_ = [
        "text",
        "storyId",
        "bodyType",
        "displayDirection",
        "documentType",
        "isAlert",
        "language",
        "permIDs",
        "products",
        "rcs",
        "reportCode",
        "sourceCode",
        "sourceName",
        "versionCreated",
    ]
    json_headlines_array = json_data["headlines"]
    # tz_replacer strips 'Z'/'-0000' suffixes so numpy can parse datetime64.
    first_created = [
        tz_replacer(headline["firstCreated"]) for headline in json_headlines_array
    ]
    headlines = [
        [headline[field] for field in Headline_Selected_Fields]
        for headline in json_headlines_array
    ]
    if len(headlines):
        headlines_dataframe = pd.DataFrame(
            headlines,
            numpy.array(first_created, dtype="datetime64"),
            Headline_Selected_Fields,
        )
        if not headlines_dataframe.empty:
            headlines_dataframe = headlines_dataframe.convert_dtypes()
    else:
        headlines_dataframe = pd.DataFrame(
            [], numpy.array(first_created, dtype="datetime64"), Headline_Selected_Fields
        )
    headlines_dataframe["versionCreated"] = headlines_dataframe.versionCreated.apply(
        pd.to_datetime
    )
    return headlines_dataframe


def get_news_story(story_id, raw_output=False, debug=False):
    """
    Return a single news story corresponding to the identifier provided in
    story_id

    Parameters
    ----------
    story_id: str
        The story id. The story id is a field you will find in every headline
        you retrieved with the legacy get_news_headlines

    raw_output: boolean
        Set this parameter to True to get the data in json format
        if set to False, the legacy will return the story content
        The default value is False

    debug: bool
        When set to True, the json request and response are printed.

    Raises
    ------
    Exception
        If http request fails or if Refinitiv Services return an error
    ValueError
        If a parameter type or value is wrong

    Examples
    --------
    >>> import refinitiv.dataplatform as ek
    >>> ek.set_app_key('set your app key here')
    >>> headlines = ek.get_news_headlines('IBM')
    >>> for index, headline_row in headlines.iterrows():
    ...     story = ek.get_news_story(headline_row['storyId'])
    ...     print(story)
    """
    logger = DefaultSession.get_default_session().logger()
    # check parameters type and values
    if not is_string_type(story_id):
        error_msg = "story_id must be a string"
        logger.error(error_msg)
        raise ValueError(error_msg)

    app_key = DefaultSession.get_default_session().app_key
    payload = {"attributionCode": "", "productName": app_key, "storyId": story_id}
    response = refinitiv.dataplatform.legacy.json_requests.send_json_request(
        News_Story_UDF_endpoint, payload, debug=debug
    )
    json_data = response.json()

    if raw_output:
        return json_data

    # Prefer inline HTML; fall back to the hosted story URL variants.
    if json_data:
        if json_data.get("story"):
            if json_data.get("story").get("storyHtml"):
                return json_data.get("story").get("storyHtml")
            else:
                return None
        elif json_data.get("viewURL"):
            return json_data.get("viewURL")
        elif json_data.get("webURL"):
            return json_data.get("webURL")
    return None
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/legacy/news_request.py
0.657978
0.251429
news_request.py
pypi
__all__ = ["get_timeseries"]

import refinitiv.dataplatform.legacy.json_requests
from .tools import (
    is_string_type,
    check_for_string_or_list_of_strings,
    check_for_string,
    check_for_int,
    get_json_value,
    to_datetime,
    get_date_from_today,
    tz_replacer,
    DefaultSession,
)
import pandas as pd
import numpy
from refinitiv.dataplatform.errors import RDPError

# UDF endpoint name for historical time-series requests.
TimeSeries_UDF_endpoint = "TimeSeries"

# Accepted values for the optional `calendar` / `corax` parameters.
Calendar_Values = ["native", "tradingdays", "calendardays"]
Corax_Values = ["adjusted", "unadjusted"]


def get_timeseries(
    rics,
    fields=None,
    start_date=None,
    end_date=None,
    interval=None,
    count=None,
    calendar=None,
    corax=None,
    normalize=False,
    raw_output=False,
    debug=False,
):
    """
    Returns historical data on one or several RICs

    Parameters
    ----------
    rics: string or list of strings
        Single RIC or List of RICs to retrieve historical data for
    start_date: string or datetime.datetime or datetime.timedelta
        Starting date and time of the historical range.
        string format is: '%Y-%m-%dT%H:%M:%S', e.g. '2016-01-20T15:04:05'.
        datetime.timedelta is negative number of day relative to datetime.now().
        Default: datetime.now() + timedelta(-100)
        You can use the helper legacy get_date_from_today.
    end_date: string or datetime.datetime or datetime.timedelta
        End date and time of the historical range.
        string format could be '%Y-%m-%d' or '%Y-%m-%dT%H:%M:%S'.
        datetime.timedelta is negative number of day relative to datetime.now().
        Default: datetime.now()
    interval: string
        Data interval. Possible values: 'tick', 'minute', 'hour', 'daily',
        'weekly', 'monthly', 'quarterly', 'yearly'.  Default: 'daily'
    fields: string or list of strings
        Use this parameter to filter the returned fields set.
        Available fields: 'TIMESTAMP', 'VALUE', 'VOLUME', 'HIGH', 'LOW',
        'OPEN', 'CLOSE', 'COUNT'.  By default all fields are returned.
    count: int, optional
        Max number of data points retrieved.
    calendar: string, optional
        Possible values: 'native', 'tradingdays', 'calendardays'.
    corax: string, optional
        Possible values: 'adjusted', 'unadjusted'
    normalize: boolean, optional
        If set to True, returns a normalized data frame with the columns
        'Date', 'Security', 'Field'.  Otherwise the returned data frame shape
        depends on the number of rics and fields in the response.
        Has lower precedence than raw_output.  Default: False
    raw_output: boolean, optional
        Set to True to get the data in json format; otherwise a data frame
        shaped according to `normalize` is returned.  Default: False
    debug: boolean, optional
        When set to True, the json request and response are printed.
        Default: False

    Raises
    ------
    Exception
        If request fails or if server returns an error
    ValueError
        If a parameter type or value is wrong

    Examples
    --------
    >> import refinitiv.dataplatform as ek
    >> ek.set_app_key('set your app key here')
    >> req = ek.get_timeseries(["MSFT.O"], start_date="2017-02-01T15:04:05",
    >>                         end_date="2017-02-05T15:04:05", interval="tick")
    >> req = ek.get_timeseries(["MSFT.O"], start_date="2017-03-01",
    >>                         end_date="2017-03-10", interval="daily")
    >> req = ek.get_timeseries(["MSFT.O"], start_date=get_date_from_today(150),
    >>                         end_date=get_date_from_today(100), interval="daily")
    """
    logger = DefaultSession.get_default_session().logger()

    # set the ric(s) in the payload
    check_for_string_or_list_of_strings(rics, "rics")
    if is_string_type(rics):
        rics = [rics.strip()]

    if isinstance(rics, list):
        # All-lowercase RICs are upper-cased; mixed-case ones are kept as-is.
        rics = [ric.upper() if ric.islower() else ric for ric in rics]

    # set the field(s) in the payload
    if fields is None or fields == "*":
        fields = ["*"]
    else:
        check_for_string_or_list_of_strings(fields, "fields")
        if is_string_type(fields):
            fields = fields.strip().upper().split()
        else:
            fields = [x.upper() for x in fields]

        if "*" in fields:
            fields = ["*"]
        elif "TIMESTAMP" not in fields:
            # TIMESTAMP is always requested: it becomes the DataFrame index.
            fields.append("TIMESTAMP")

    if interval is None:
        interval = "daily"
    # check the interval in the payload
    check_for_string(interval, "interval")

    if start_date is None:
        start_date = get_date_from_today(100)
    if end_date is None:
        end_date = get_date_from_today(0)

    start_date = to_datetime(start_date).isoformat()
    end_date = to_datetime(end_date).isoformat()

    # NOTE: lexicographic comparison of the two isoformat strings, as in the
    # original implementation; both come from to_datetime() so formats match.
    if start_date > end_date:
        # BUGFIX: this branch previously read
        #     with "...".format(...) as error_msg:
        # which raised AttributeError (str has no __enter__) instead of the
        # intended ValueError.
        error_msg = "end date ({})should be after than start date ({})".format(
            end_date, start_date
        )
        logger.error(error_msg)
        raise ValueError(error_msg)

    payload = {
        "rics": rics,
        "fields": fields,
        "interval": interval,
        "startdate": start_date,
        "enddate": end_date,
    }

    # Add optional parameters
    # set the count in the payload
    if count is not None:
        check_for_int(count, "count")
        payload.update({"count": count})

    # set the calendar in the payload
    if calendar is not None:
        if is_string_type(calendar):
            payload.update({"calendar": calendar})
        else:
            # BUGFIX: was `with "..." as error_msg:` (AttributeError at runtime).
            error_msg = "calendar must has string type"
            logger.error(error_msg)
            raise ValueError(error_msg)

    # set the corax in the payload
    if corax is not None:
        if is_string_type(corax):
            payload.update({"corax": corax})
        else:
            # BUGFIX: was `with "..." as error_msg:` (AttributeError at runtime).
            error_msg = "corax must be a string"
            logger.error(error_msg)
            raise ValueError(error_msg)

    response = refinitiv.dataplatform.legacy.json_requests.send_json_request(
        TimeSeries_UDF_endpoint, payload, debug=debug
    )
    ts_result = response.json()

    # Catch all errors to raise a warning
    ts_timeserie_data = ts_result["timeseriesData"]
    ts_status_errors = [
        ts_data
        for ts_data in ts_timeserie_data
        if get_json_value(ts_data, "statusCode") == "Error"
    ]
    ts_error_messages = ""
    for ts_status in ts_status_errors:
        ts_error_message = get_json_value(ts_status, "errorMessage")
        # Keep the message from "Description" onwards, then substitute the
        # instrument name so the user sees which RIC failed.
        ts_error_message = ts_error_message[ts_error_message.find("Description") :]
        ts_instrument = get_json_value(ts_status, "ric")
        ts_error_message = ts_error_message.replace("Description", ts_instrument)
        ts_error_messages += ts_error_message
        ts_error_messages += " | "
        warning_message = "Error with {}".format(ts_error_message)
        logger.warning(warning_message)

    # if all timeseries are in error, then raise RDPError with all error messages
    if len(ts_status_errors) == len(ts_timeserie_data):
        logger.error(ts_error_messages)
        raise RDPError("Error", message=ts_error_messages)

    if raw_output:
        return ts_result

    if normalize:
        data_frame = NormalizedDataFrame_Formatter(ts_result).get_data_frame()
    else:
        data_frame = NiceDataFrame_Formatter(ts_result).get_data_frame()
    if len(data_frame) > 0:
        data_frame = data_frame.fillna(numpy.nan)
    return data_frame


class NormalizedDataFrame_Formatter:
    """Formats a timeseries response as a long/normalized frame with the
    columns Date / Security / Field / Value."""

    def __init__(self, json_data):
        self.json_data = json_data

    def get_data_frame(self):
        timeseriesList = self.json_data["timeseriesData"]
        data_frames = []
        for timeseries in timeseriesList:
            ric = timeseries["ric"]
            error_code = timeseries["statusCode"]
            if error_code.lower() == "error":
                # Skip RICs that came back in error (warned about upstream).
                continue
            fields = [f["name"] for f in timeseries["fields"]]
            timestamp_index = fields.index("TIMESTAMP")
            # remove timestamp from fields (timestamp is used as index for dataframe)
            fields.pop(timestamp_index)
            datapoints = numpy.array(timeseries["dataPoints"])
            if len(datapoints):
                timestamps = [
                    tz_replacer(value) for value in datapoints[:, timestamp_index]
                ]
                timestamps = numpy.array(timestamps, dtype="datetime64")
                # remove timestamp column from numpy array
                datapoints = numpy.delete(datapoints, numpy.s_[timestamp_index], 1)
                fields_count = len(fields)
                column_size = len(datapoints)
                # Build the long-format columns: one row per (timestamp, field).
                symbol_column = numpy.array([ric] * fields_count * column_size)
                fields_column = numpy.array(fields * column_size)
                values_column = numpy.concatenate(datapoints, axis=0)
                timestamp_column = [
                    [timestamps[i]] * fields_count for i in range(timestamps.size)
                ]
                timestamp_column = numpy.concatenate(timestamp_column, axis=0)
                df = pd.DataFrame(
                    dict(
                        Date=timestamp_column,
                        Security=symbol_column,
                        Field=fields_column,
                        Value=values_column,
                    ),
                    dtype="float",
                )
                if not df.empty:
                    df = df.convert_dtypes()
                data_frames.append(df)
            else:
                data_frames.append(pd.DataFrame([], columns=fields))
        return pd.concat(data_frames)


class NiceDataFrame_Formatter:
    """Formats a timeseries response as a wide frame whose shape depends on
    the number of RICs and fields (see get_timeseries docstring)."""

    def __init__(self, json_data):
        self.json_data = json_data

    def get_data_frame(self):
        data_frames, rics, fields = self._get_frame_list()
        rics_count = len(rics)
        fields_count = len(fields)
        if rics_count == 0 or fields_count == 0:
            return data_frames
        if rics_count == 1:
            return self._get_frame_1_ric_N_fields(data_frames, rics[0])
        if rics_count > 1 and fields_count == 1:
            return self._get_frame_N_rics_1_field(data_frames, rics, fields[0])
        return self._get_frame_N_rics_N_fields(data_frames, rics, fields)

    def _get_frame_list(self):
        # One DataFrame per successful RIC, indexed by its timestamps.
        timeseriesList = self.json_data["timeseriesData"]
        data_frames = []
        unique_fields = []
        rics = []
        for timeseries in timeseriesList:
            ric = timeseries["ric"]
            error_code = timeseries["statusCode"]
            if error_code.lower() == "error":
                continue
            rics.append(ric)
            fields = [f["name"] for f in timeseries["fields"]]
            timestamp_index = fields.index("TIMESTAMP")
            # remove timestamp from fields (timestamp is used as index for dataframe)
            fields.pop(timestamp_index)
            unique_fields = fields
            datapoints = numpy.array(timeseries["dataPoints"])
            if len(datapoints):
                timestamps = numpy.array(
                    [tz_replacer(value) for value in datapoints[:, timestamp_index]],
                    dtype="datetime64",
                )  # index for dataframe
                datapoints = numpy.delete(
                    datapoints, numpy.s_[timestamp_index], 1
                )  # remove timestamp column from numpy array
                df = pd.DataFrame(
                    datapoints, columns=fields, index=timestamps, dtype="float"
                )
            else:
                df = pd.DataFrame([], columns=fields)
            if not df.empty:
                df = df.convert_dtypes()
            df.index.name = "Date"
            data_frames.append(df)
        return data_frames, list(rics), list(unique_fields)

    def _get_frame_1_ric_N_fields(self, data_frames, ricName):
        data_frame = pd.concat(data_frames, axis=1)
        if not data_frame.empty:
            data_frame = data_frame.convert_dtypes()
        data_frame.axes[1].name = ricName
        return data_frame

    def _get_frame_N_rics_1_field(self, data_frames, rics, fieldName):
        # Rename the single field column of each frame to its RIC.
        for ric_name, df in zip(rics, data_frames):
            df.rename(columns={fieldName: ric_name}, inplace=True)
        data_frame = pd.concat(data_frames, axis=1)
        if not data_frame.empty:
            data_frame = data_frame.convert_dtypes()
        data_frame.axes[1].name = fieldName
        return data_frame

    def _get_frame_N_rics_N_fields(self, data_frames, rics, fields):
        # Promote each frame's columns to a (Security, Field) MultiIndex.
        for ric_name, df in zip(rics, data_frames):
            columns = [(ric_name, f) for f in df.columns]
            df.columns = pd.MultiIndex.from_tuples(columns)
        data_frame = pd.concat(data_frames, axis=1)
        data_frame.axes[1].names = ["Security", "Field"]
        if data_frame.empty:
            return data_frame
        else:
            return data_frame.convert_dtypes()
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/legacy/time_series.py
0.691914
0.324998
time_series.py
pypi
from typing import Union
from urllib.parse import ParseResult, ParseResultBytes, urlparse

import numpy as np


def get_from_path(obj, path, delim="."):
    """Follow *path* (split on *delim*) into nested mappings/sequences.

    Mapping-like steps use ``.get`` (missing keys yield None); integer-like
    steps index into iterables.  Steps matching neither are skipped.
    """
    splitted = path.split(delim)
    for k in splitted:
        if hasattr(obj, "get"):
            obj = obj.get(k)
        elif iterable(obj) and is_int(k):
            obj = obj[int(k)]
    return obj


def is_int(obj):
    """Return True if *obj* is an int or a string parseable as an int."""
    if isinstance(obj, str):
        try:
            int(obj)
        except Exception:
            return False
        else:
            return True
    return isinstance(obj, int)


def iterable(obj):
    """Return True if *obj* supports iteration."""
    try:
        iter(obj)
    except Exception:
        return False
    else:
        return True


def urljoin(*pieces):
    """Join URL path pieces with single slashes, preserving a leading slash
    on the first piece and a trailing slash on the last one."""
    # first piece have a leading slash
    if pieces and len(pieces[0]) > 1 and pieces[0][0] == "/":
        pieces = ("/",) + pieces
    # last piece have a trailing slash
    if pieces and len(pieces[-1]) > 1 and pieces[-1][-1] == "/":
        pieces = pieces + ("/",)
    return "/".join(s.strip("/") for s in pieces)


def is_any_defined(*args):
    """Return True if at least one argument is truthy."""
    return any(args)


def is_all_defined(*args):
    """Return True if all arguments are truthy."""
    return all(args)


def is_all_same_type(item_type, iterable):
    """Return True if every item of *iterable* is an instance of *item_type*."""
    return all(isinstance(item, item_type) for item in iterable)


def make_counter():
    """Return a closure that yields 1, 2, 3, ... on successive calls."""
    i = 0

    def counter():
        nonlocal i
        i += 1
        return i

    return counter


def get_response_reason(response):
    """Return the reason phrase of an httpx- or requests-style response."""
    if hasattr(response, "reason_phrase"):
        assert not hasattr(response, "reason")
        return response.reason_phrase
    elif hasattr(response, "reason"):
        return response.reason
    return "unknown reason"


class cached_property(object):
    """Descriptor that caches the wrapped method's result on the instance.

    The first access computes the value and stores it in the instance's
    ``__dict__`` under the function's name, so later accesses bypass the
    descriptor entirely.
    """

    def __init__(self, func):
        self.func = func

    def __get__(self, instance, cls=None):
        # BUGFIX: class-level access (instance is None) previously raised
        # AttributeError via ``None.__dict__``; return the descriptor itself,
        # as functools.cached_property does.
        if instance is None:
            return self
        result = instance.__dict__[self.func.__name__] = self.func(instance)
        return result


def parse_list_of_str(param: Union[str, list]) -> list:
    """Normalize *param* to a list of strings.

    Raises ValueError for any other type or for lists with non-str items.
    """
    if isinstance(param, str):
        return [param]

    if isinstance(param, list):
        if is_all_same_type(str, param):
            return param
        else:
            raise ValueError(f"Not all elements are strings in {param}")

    raise ValueError(f"Invalid type, expected str or list:{type(param)} is given")


class ArgsParser:
    """Wraps a parse function and coerces its result to common types."""

    def __init__(self, parse) -> None:
        self.parse = parse

    def get_str(self, *args, delim=None) -> str:
        if delim is not None:
            # Join the parsed list with the delimiter instead of str()-ing it.
            retval = delim.join(str(item) for item in self.get_list(*args))
        else:
            retval = self.parse(*args)
        if not isinstance(retval, str):
            retval = str(retval)
        return retval

    def get_list(self, *args) -> list:
        retval = self.parse(*args)
        if not isinstance(retval, list):
            retval = [retval]
        return retval

    def get_float(self, *args) -> float:
        retval = self.parse(*args)
        if isinstance(retval, np.datetime64):
            # datetime64 cannot go through float() directly.
            retval = retval.astype(float)
        else:
            retval = float(retval)
        return retval

    def get_bool(self, *args) -> bool:
        retval = self.parse(*args)
        if not isinstance(retval, bool):
            retval = bool(retval)
        return retval


universe_arg_parser = ArgsParser(parse_list_of_str)


def parse_url(url: str) -> ParseResult:
    """urlparse() with a workaround for Python < 3.9.

    Older urlparse mishandles "scheme:netloc-less" URLs such as
    ``ws:localhost``; detect that shape and split scheme/path manually.
    """
    import sys

    py_ver = sys.version_info
    if py_ver.major == 3 and py_ver.minor < 9:
        result_urlparse = urlparse(url)

        if isinstance(result_urlparse, ParseResultBytes):
            return result_urlparse

        scheme = result_urlparse.scheme
        netloc = result_urlparse.netloc
        path = result_urlparse.path
        query = result_urlparse.query
        fragment = result_urlparse.fragment

        if not scheme and not netloc and path and ":" in path:
            splitted = path.split(":")
            if len(splitted) == 2:
                scheme, path = splitted

        result = ParseResult(
            scheme=scheme,
            netloc=netloc,
            path=path,
            params=result_urlparse.params,
            query=query,
            fragment=fragment,
        )
    else:
        result = urlparse(url)

    return result


def get_scheme_port(scheme, port):
    """Fill in missing websocket scheme/port defaults ("ws"/80, "wss"/443)."""
    if not scheme and not port:
        scheme = "ws"
        port = 80
    elif not scheme and port:
        scheme = "ws"
        if port == 443:
            scheme = "wss"
    elif scheme and not port:
        port = 80
        if scheme == "wss":
            port = 443
    return scheme, port
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/tools/_common.py
0.751739
0.277158
_common.py
pypi
import asyncio
import http
import inspect
import io
import json
import queue
import threading
import types
import typing
from urllib.parse import unquote, urljoin, urlsplit

import requests

from . import http3
from .adapters import HTTPAdapter
from .sessions import Session


class _HeaderDict(requests.packages.urllib3._collections.HTTPHeaderDict):
    # NOTE(review): *default* is ignored here; delegates to urllib3's
    # getheaders(), which returns every value stored for *key*.
    def get_all(self, key: str, default: str) -> str:
        return self.getheaders(key)


class _MockOriginalResponse:
    """
    We have to jump through some hoops to present the response as if
    it was made using urllib3.
    """

    def __init__(self, headers: typing.List[typing.Tuple[bytes, bytes]]) -> None:
        self.msg = _HeaderDict(headers)
        self.closed = False

    def isclosed(self) -> bool:
        return self.closed


def _get_reason_phrase(status_code: int) -> str:
    # Empty string for status codes the stdlib does not know about.
    try:
        return http.HTTPStatus(status_code).phrase
    except ValueError:
        return ""


class ASGIAdapter(HTTPAdapter):
    """Transport adapter that routes requests into an ASGI app in-process
    instead of over the network (test-client style)."""

    def __init__(self, app, suppress_exceptions: bool = False) -> None:
        # app: the ASGI application callable.
        # suppress_exceptions: when True, app crashes become 500 responses
        # instead of propagating to the caller.
        self.app = app
        self.suppress_exceptions = suppress_exceptions

    async def send(  # type: ignore
        self, request: requests.PreparedRequest, *args: typing.Any, **kwargs: typing.Any
    ) -> requests.Response:
        """Build an ASGI scope from *request*, run the app, and convert the
        ASGI messages back into a response object."""
        scheme, netloc, path, query, fragment = urlsplit(request.url)  # type: ignore

        default_port = {"http": 80, "ws": 80, "https": 443, "wss": 443}[scheme]

        if ":" in netloc:
            host, port_string = netloc.split(":", 1)
            port = int(port_string)
        else:
            host = netloc
            port = default_port

        # Include the 'host' header.
        if "host" in request.headers:
            # Caller already set one; it is appended with the other headers below.
            headers = []  # type: typing.List[typing.Tuple[bytes, bytes]]
        elif port == default_port:
            headers = [(b"host", host.encode())]
        else:
            headers = [(b"host", (f"{host}:{port}").encode())]

        # Include other request headers.
        headers += [
            (key.lower().encode(), value.encode())
            for key, value in request.headers.items()
        ]

        scope = {
            "type": "http",
            "http_version": "1.1",
            "method": request.method,
            "path": unquote(path),
            "root_path": "",
            "scheme": scheme,
            "query_string": query.encode(),
            "headers": headers,
            "client": ["testclient", 50000],
            "server": [host, port],
            "extensions": {"http.response.template": {}},
        }

        async def receive():
            # ASGI receive callable: feeds the request body to the app,
            # then reports disconnect once the response has completed.
            nonlocal request_complete, response_complete

            if request_complete:
                # Body fully delivered; block until the app finishes responding.
                while not response_complete:
                    await asyncio.sleep(0.0001)
                return {"type": "http.disconnect"}

            body = request.body
            if isinstance(body, str):
                body_bytes = body.encode("utf-8")  # type: bytes
            elif body is None:
                body_bytes = b""
            elif isinstance(body, types.GeneratorType):
                # Streaming body: deliver one chunk per receive() call.
                try:
                    chunk = body.send(None)
                    if isinstance(chunk, str):
                        chunk = chunk.encode("utf-8")
                    return {"type": "http.request", "body": chunk, "more_body": True}
                except StopIteration:
                    request_complete = True
                    return {"type": "http.request", "body": b""}
            else:
                body_bytes = body

            request_complete = True
            return {"type": "http.request", "body": body_bytes}

        async def send(message) -> None:
            # ASGI send callable: accumulates the app's response messages
            # into raw_kwargs for the AsyncResponse built below.
            nonlocal raw_kwargs, response_started, response_complete, template, context

            if message["type"] == "http.response.start":
                assert (
                    not response_started
                ), 'Received multiple "http.response.start" messages.'
                raw_kwargs["status_code"] = message["status"]
                raw_kwargs["headers"] = message["headers"]
                response_started = True
            elif message["type"] == "http.response.body":
                assert (
                    response_started
                ), 'Received "http.response.body" without "http.response.start".'
                assert (
                    not response_complete
                ), 'Received "http.response.body" after response completed.'
                body = message.get("body", b"")
                more_body = message.get("more_body", False)
                # HEAD responses must not carry a body.
                if request.method != "HEAD":
                    raw_kwargs["content"] += body
                if not more_body:
                    response_complete = True
            elif message["type"] == "http.response.template":
                # Extension used by template-aware test clients.
                template = message["template"]
                context = message["context"]

        request_complete = False
        response_started = False
        response_complete = False
        raw_kwargs = {"content": b""}  # type: typing.Dict[str, typing.Any]
        template = None
        context = None

        try:
            await self.app(scope, receive, send)
        except BaseException as exc:
            if not self.suppress_exceptions:
                raise exc from None

        if not self.suppress_exceptions:
            assert response_started, "TestClient did not receive any response."
        elif not response_started:
            # App crashed and exceptions are suppressed: synthesize a 500.
            raw_kwargs = {"status_code": 500, "headers": []}

        raw = http3.AsyncResponse(**raw_kwargs)
        response = self.build_response(request, raw)
        if template is not None:
            response.template = template
            response.context = context
        return response


class ASGISession(Session):
    """Session pre-mounted with an ASGIAdapter so that http(s):// requests
    are dispatched to *app* in-process."""

    def __init__(
        self,
        app,
        base_url: str = "http://mockserver",
        suppress_exceptions: bool = False,
    ) -> None:
        super(ASGISession, self).__init__()
        adapter = ASGIAdapter(app, suppress_exceptions=suppress_exceptions)
        self.mount("http://", adapter)
        self.mount("https://", adapter)
        self.headers.update({"user-agent": "testclient"})
        self.app = app
        self.base_url = base_url

    async def request(self, method, url, *args, **kwargs) -> requests.Response:
        # Relative URLs are resolved against base_url before dispatch.
        url = urljoin(self.base_url, url)
        return await super().request(method, url, *args, **kwargs)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/vendor/requests_async/asgi.py
0.610337
0.162579
asgi.py
pypi
import binascii
import mimetypes
import os
import typing
from io import BytesIO
from urllib.parse import quote


class Field:
    """Abstract base for a single multipart/form-data field."""

    def render_headers(self) -> bytes:
        raise NotImplementedError()  # pragma: nocover

    def render_data(self) -> bytes:
        raise NotImplementedError()  # pragma: nocover


class DataField(Field):
    """A plain name/value form field."""

    def __init__(self, name: str, value: str) -> None:
        self.name = name
        self.value = value

    def render_headers(self) -> bytes:
        # Field names are percent-encoded, then emitted as ASCII.
        field_name = quote(self.name, encoding="utf-8").encode("ascii")
        return (
            b'Content-Disposition: form-data; name="' + field_name + b'"\r\n\r\n'
        )

    def render_data(self) -> bytes:
        return self.value.encode("utf-8")


class FileField(Field):
    """A file-upload form field.

    *value* is either a file-like object, or a tuple of
    (filename, fileobj[, content_type]).
    """

    def __init__(
        self, name: str, value: typing.Union[typing.IO[typing.AnyStr], tuple]
    ) -> None:
        self.name = name
        if isinstance(value, tuple):
            self.filename = value[0]
            self.file = value[1]
            if len(value) > 2:
                self.content_type = value[2]
            else:
                self.content_type = self.guess_content_type()
        else:
            # Bare file object: derive the filename from its .name attribute.
            self.file = value
            self.filename = os.path.basename(getattr(value, "name", "upload"))
            self.content_type = self.guess_content_type()

    def guess_content_type(self) -> str:
        guessed, _ = mimetypes.guess_type(self.filename)
        return guessed or "application/octet-stream"

    def render_headers(self) -> bytes:
        field_name = quote(self.name, encoding="utf-8").encode("ascii")
        file_name = quote(self.filename, encoding="utf-8").encode("ascii")
        mime = self.content_type.encode("ascii")
        return (
            b'Content-Disposition: form-data; name="'
            + field_name
            + b'"; filename="'
            + file_name
            + b'"\r\n'
            + b"Content-Type: "
            + mime
            + b"\r\n\r\n"
        )

    def render_data(self) -> bytes:
        payload = self.file.read()
        if isinstance(payload, str):
            return payload.encode("utf-8")
        return payload


def iter_fields(data: dict, files: dict) -> typing.Iterator[Field]:
    """Yield a Field per entry in *data* (lists expand to repeats), then one
    FileField per entry in *files*."""
    for field_name, field_value in data.items():
        entries = field_value if isinstance(field_value, list) else [field_value]
        for entry in entries:
            yield DataField(name=field_name, value=entry)
    for file_name, file_value in files.items():
        yield FileField(name=file_name, value=file_value)


def multipart_encode(data: dict, files: dict) -> typing.Tuple[bytes, str]:
    """Encode *data* and *files* as a multipart/form-data body.

    Returns (body_bytes, content_type) with a random hex boundary.
    """
    boundary = binascii.hexlify(os.urandom(16))
    chunks = []
    for field in iter_fields(data, files):
        chunks.append(b"--%s\r\n" % boundary)
        chunks.append(field.render_headers())
        chunks.append(field.render_data())
        chunks.append(b"\r\n")
    chunks.append(b"--%s--\r\n" % boundary)
    content_type = "multipart/form-data; boundary=%s" % boundary.decode("ascii")
    return b"".join(chunks), content_type
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/vendor/requests_async/http3/multipart.py
0.540196
0.189521
multipart.py
pypi
from enum import IntEnum


class StatusCode(IntEnum):
    """HTTP status codes and reason phrases

    Status codes from the following RFCs are all observed:

    * RFC 7231: Hypertext Transfer Protocol (HTTP/1.1), obsoletes 2616
    * RFC 6585: Additional HTTP Status Codes
    * RFC 3229: Delta encoding in HTTP
    * RFC 4918: HTTP Extensions for WebDAV, obsoletes 2518
    * RFC 5842: Binding Extensions to WebDAV
    * RFC 7238: Permanent Redirect
    * RFC 2295: Transparent Content Negotiation in HTTP
    * RFC 2774: An HTTP Extension Framework
    * RFC 7540: Hypertext Transfer Protocol Version 2 (HTTP/2)
    """

    def __new__(cls, value: int, phrase: str = "") -> "StatusCode":
        # Members are declared as `NAME = code, "Phrase"`; keep the int value
        # and attach the reason phrase as an extra attribute.
        member = int.__new__(cls, value)  # type: ignore
        member._value_ = value
        member.phrase = phrase
        return member

    def __str__(self) -> str:
        return f"{self.value}"

    @classmethod
    def get_reason_phrase(cls, value: int) -> str:
        """Reason phrase for *value*, or "" for unknown codes."""
        try:
            return cls(value).phrase  # type: ignore
        except ValueError:
            return ""

    @classmethod
    def is_redirect(cls, value: int) -> bool:
        """True for the redirect codes a client should follow.

        304 (Not Modified) and 305 (Use Proxy) are deliberately excluded.
        """
        return value in {
            cls.MOVED_PERMANENTLY,  # 301 (Cacheable redirect. Method may change to GET.)
            cls.FOUND,  # 302 (Uncacheable redirect. Method may change to GET.)
            cls.SEE_OTHER,  # 303 (Client should make a GET or HEAD request.)
            cls.TEMPORARY_REDIRECT,  # 307 (Equiv. 302, but retain method)
            cls.PERMANENT_REDIRECT,  # 308 (Equiv. 301, but retain method)
        }

    @classmethod
    def is_client_error(cls, value: int) -> bool:
        return 400 <= value <= 499

    @classmethod
    def is_server_error(cls, value: int) -> bool:
        return 500 <= value <= 599

    # informational
    CONTINUE = 100, "Continue"
    SWITCHING_PROTOCOLS = 101, "Switching Protocols"
    PROCESSING = 102, "Processing"

    # success
    OK = 200, "OK"
    CREATED = 201, "Created"
    ACCEPTED = 202, "Accepted"
    NON_AUTHORITATIVE_INFORMATION = 203, "Non-Authoritative Information"
    NO_CONTENT = 204, "No Content"
    RESET_CONTENT = 205, "Reset Content"
    PARTIAL_CONTENT = 206, "Partial Content"
    MULTI_STATUS = 207, "Multi-Status"
    ALREADY_REPORTED = 208, "Already Reported"
    IM_USED = 226, "IM Used"

    # redirection
    MULTIPLE_CHOICES = 300, "Multiple Choices"
    MOVED_PERMANENTLY = 301, "Moved Permanently"
    FOUND = 302, "Found"
    SEE_OTHER = 303, "See Other"
    NOT_MODIFIED = 304, "Not Modified"
    USE_PROXY = 305, "Use Proxy"
    TEMPORARY_REDIRECT = 307, "Temporary Redirect"
    PERMANENT_REDIRECT = 308, "Permanent Redirect"

    # client error
    BAD_REQUEST = 400, "Bad Request"
    UNAUTHORIZED = 401, "Unauthorized"
    PAYMENT_REQUIRED = 402, "Payment Required"
    FORBIDDEN = 403, "Forbidden"
    NOT_FOUND = 404, "Not Found"
    METHOD_NOT_ALLOWED = 405, "Method Not Allowed"
    NOT_ACCEPTABLE = 406, "Not Acceptable"
    PROXY_AUTHENTICATION_REQUIRED = 407, "Proxy Authentication Required"
    REQUEST_TIMEOUT = 408, "Request Timeout"
    CONFLICT = 409, "Conflict"
    GONE = 410, "Gone"
    LENGTH_REQUIRED = 411, "Length Required"
    PRECONDITION_FAILED = 412, "Precondition Failed"
    REQUEST_ENTITY_TOO_LARGE = 413, "Request Entity Too Large"
    REQUEST_URI_TOO_LONG = 414, "Request-URI Too Long"
    UNSUPPORTED_MEDIA_TYPE = 415, "Unsupported Media Type"
    REQUESTED_RANGE_NOT_SATISFIABLE = 416, "Requested Range Not Satisfiable"
    EXPECTATION_FAILED = 417, "Expectation Failed"
    MISDIRECTED_REQUEST = 421, "Misdirected Request"
    UNPROCESSABLE_ENTITY = 422, "Unprocessable Entity"
    LOCKED = 423, "Locked"
    FAILED_DEPENDENCY = 424, "Failed Dependency"
    UPGRADE_REQUIRED = 426, "Upgrade Required"
    PRECONDITION_REQUIRED = 428, "Precondition Required"
    TOO_MANY_REQUESTS = 429, "Too Many Requests"
    REQUEST_HEADER_FIELDS_TOO_LARGE = 431, "Request Header Fields Too Large"

    # server errors
    INTERNAL_SERVER_ERROR = 500, "Internal Server Error"
    NOT_IMPLEMENTED = 501, "Not Implemented"
    BAD_GATEWAY = 502, "Bad Gateway"
    SERVICE_UNAVAILABLE = 503, "Service Unavailable"
    GATEWAY_TIMEOUT = 504, "Gateway Timeout"
    HTTP_VERSION_NOT_SUPPORTED = 505, "HTTP Version Not Supported"
    VARIANT_ALSO_NEGOTIATES = 506, "Variant Also Negotiates"
    INSUFFICIENT_STORAGE = 507, "Insufficient Storage"
    LOOP_DETECTED = 508, "Loop Detected"
    NOT_EXTENDED = 510, "Not Extended"
    NETWORK_AUTHENTICATION_REQUIRED = 511, "Network Authentication Required"


codes = StatusCode

# Include lower-case styles for `requests` compatibility; the aliases hold
# plain ints, not enum members.
for code in codes:
    setattr(codes, code.name.lower(), int(code))
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/vendor/requests_async/http3/status_codes.py
0.722135
0.260058
status_codes.py
pypi
import enum
import ssl
import typing
from types import TracebackType

from .config import CertTypes, PoolLimits, TimeoutConfig, TimeoutTypes, VerifyTypes
from .models import (
    URL,
    AsyncRequest,
    AsyncRequestData,
    AsyncResponse,
    Headers,
    HeaderTypes,
    QueryParamTypes,
    Request,
    RequestData,
    Response,
    URLTypes,
)


class Protocol(str, enum.Enum):
    # Wire-protocol identifiers a connection may negotiate (e.g. via ALPN).
    HTTP_11 = "HTTP/1.1"
    HTTP_2 = "HTTP/2"


class AsyncDispatcher:
    """
    Base class for async dispatcher classes, that handle sending the request.

    Stubs out the interface, as well as providing a `.request()` convenience
    implementation, to make it easy to use or test stand-alone dispatchers,
    without requiring a complete `Client` instance.
    """

    async def request(
        self,
        method: str,
        url: URLTypes,
        *,
        data: AsyncRequestData = b"",
        params: QueryParamTypes = None,
        headers: HeaderTypes = None,
        verify: VerifyTypes = None,
        cert: CertTypes = None,
        timeout: TimeoutTypes = None
    ) -> AsyncResponse:
        # Convenience entry point: build the request object, then delegate
        # to `send()`, which concrete dispatchers must implement.
        request = AsyncRequest(method, url, data=data, params=params, headers=headers)
        return await self.send(request, verify=verify, cert=cert, timeout=timeout)

    async def send(
        self,
        request: AsyncRequest,
        verify: VerifyTypes = None,
        cert: CertTypes = None,
        timeout: TimeoutTypes = None,
    ) -> AsyncResponse:
        # Subclasses implement the actual network (or in-process) dispatch.
        raise NotImplementedError()  # pragma: nocover

    async def close(self) -> None:
        # Optional hook for releasing resources; no-op by default.
        pass  # pragma: nocover

    async def __aenter__(self) -> "AsyncDispatcher":
        return self

    async def __aexit__(
        self,
        exc_type: typing.Type[BaseException] = None,
        exc_value: BaseException = None,
        traceback: TracebackType = None,
    ) -> None:
        await self.close()


class Dispatcher:
    """
    Base class for synchronous dispatcher classes, that handle sending the
    request.

    Stubs out the interface, as well as providing a `.request()` convenience
    implementation, to make it easy to use or test stand-alone dispatchers,
    without requiring a complete `Client` instance.
    """

    def request(
        self,
        method: str,
        url: URLTypes,
        *,
        data: RequestData = b"",
        params: QueryParamTypes = None,
        headers: HeaderTypes = None,
        verify: VerifyTypes = None,
        cert: CertTypes = None,
        timeout: TimeoutTypes = None
    ) -> Response:
        # Sync mirror of AsyncDispatcher.request().
        request = Request(method, url, data=data, params=params, headers=headers)
        return self.send(request, verify=verify, cert=cert, timeout=timeout)

    def send(
        self,
        request: Request,
        verify: VerifyTypes = None,
        cert: CertTypes = None,
        timeout: TimeoutTypes = None,
    ) -> Response:
        raise NotImplementedError()  # pragma: nocover

    def close(self) -> None:
        pass  # pragma: nocover

    def __enter__(self) -> "Dispatcher":
        return self

    def __exit__(
        self,
        exc_type: typing.Type[BaseException] = None,
        exc_value: BaseException = None,
        traceback: TracebackType = None,
    ) -> None:
        self.close()


class BaseReader:
    """
    A stream reader. Abstracts away any asyncio-specific interfaces
    into a more generic base class, that we can use with alternate
    backends, or for stand-alone test cases.
    """

    async def read(
        self, n: int, timeout: TimeoutConfig = None, flag: typing.Any = None
    ) -> bytes:
        raise NotImplementedError()  # pragma: no cover


class BaseWriter:
    """
    A stream writer. Abstracts away any asyncio-specific interfaces
    into a more generic base class, that we can use with alternate
    backends, or for stand-alone test cases.
    """

    def write_no_block(self, data: bytes) -> None:
        # Fire-and-forget write that must not await/block the caller.
        raise NotImplementedError()  # pragma: no cover

    async def write(self, data: bytes, timeout: TimeoutConfig = None) -> None:
        raise NotImplementedError()  # pragma: no cover

    async def close(self) -> None:
        raise NotImplementedError()  # pragma: no cover


class BasePoolSemaphore:
    """
    A semaphore for use with connection pooling.

    Abstracts away any asyncio-specific interfaces.
    """

    async def acquire(self) -> None:
        raise NotImplementedError()  # pragma: no cover

    def release(self) -> None:
        raise NotImplementedError()  # pragma: no cover


class ConcurrencyBackend:
    """
    Abstract concurrency backend: opens connections, provides pool
    semaphores, and bridges between sync and async execution.
    """

    async def connect(
        self,
        hostname: str,
        port: int,
        ssl_context: typing.Optional[ssl.SSLContext],
        timeout: TimeoutConfig,
    ) -> typing.Tuple[BaseReader, BaseWriter, Protocol]:
        raise NotImplementedError()  # pragma: no cover

    def get_semaphore(self, limits: PoolLimits) -> BasePoolSemaphore:
        raise NotImplementedError()  # pragma: no cover

    async def run_in_threadpool(
        self, func: typing.Callable, *args: typing.Any, **kwargs: typing.Any
    ) -> typing.Any:
        raise NotImplementedError()  # pragma: no cover

    async def iterate_in_threadpool(self, iterator):  # type: ignore
        # Sentinel exception: StopIteration cannot propagate cleanly out of
        # a threadpool call into an async generator, so we translate it.
        class IterationComplete(Exception):
            pass

        def next_wrapper(iterator):  # type: ignore
            try:
                return next(iterator)
            except StopIteration:
                raise IterationComplete()

        while True:
            try:
                yield await self.run_in_threadpool(next_wrapper, iterator)
            except IterationComplete:
                break

    def run(
        self, coroutine: typing.Callable, *args: typing.Any, **kwargs: typing.Any
    ) -> typing.Any:
        # Run a coroutine to completion from synchronous code.
        raise NotImplementedError()  # pragma: no cover

    def iterate(self, async_iterator):  # type: ignore
        # Drive an async iterator from synchronous code, one item per `run`.
        while True:
            try:
                yield self.run(async_iterator.__anext__)
            except StopAsyncIteration:
                break

    def background_manager(
        self, coroutine: typing.Callable, args: typing.Any
    ) -> "BaseBackgroundManager":
        raise NotImplementedError()  # pragma: no cover


class BaseBackgroundManager:
    """
    Async context manager wrapping a background task for its lifetime.
    """

    async def __aenter__(self) -> "BaseBackgroundManager":
        raise NotImplementedError()  # pragma: no cover

    async def __aexit__(
        self,
        exc_type: typing.Type[BaseException] = None,
        exc_value: BaseException = None,
        traceback: TracebackType = None,
    ) -> None:
        raise NotImplementedError()  # pragma: no cover
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/vendor/requests_async/http3/interfaces.py
0.753648
0.158565
interfaces.py
pypi
import asyncio
import os
import ssl
import typing

import certifi

from .__version__ import __version__

CertTypes = typing.Union[str, typing.Tuple[str, str]]
VerifyTypes = typing.Union[str, bool]
TimeoutTypes = typing.Union[float, typing.Tuple[float, float, float], "TimeoutConfig"]


USER_AGENT = f"python-http3/{__version__}"

DEFAULT_CIPHERS = ":".join(
    [
        "ECDHE+AESGCM",
        "ECDHE+CHACHA20",
        "DHE+AESGCM",
        "DHE+CHACHA20",
        "ECDH+AESGCM",
        "DH+AESGCM",
        "ECDH+AES",
        "DH+AES",
        "RSA+AESGCM",
        "RSA+AES",
        "!aNULL",
        "!eNULL",
        "!MD5",
        "!DSS",
    ]
)


class SSLConfig:
    """
    SSL Configuration.

    Holds the `cert` (client certificate) and `verify` (server verification)
    settings, and lazily builds the corresponding `ssl.SSLContext`.
    """

    def __init__(self, *, cert: CertTypes = None, verify: VerifyTypes = True):
        self.cert = cert
        self.verify = verify

    def __eq__(self, other: typing.Any) -> bool:
        return (
            isinstance(other, self.__class__)
            and self.cert == other.cert
            and self.verify == other.verify
        )

    def __repr__(self) -> str:
        class_name = self.__class__.__name__
        return f"{class_name}(cert={self.cert}, verify={self.verify})"

    def with_overrides(
        self, cert: CertTypes = None, verify: VerifyTypes = None
    ) -> "SSLConfig":
        """
        Return a config with the given non-None fields replaced.

        Returns `self` unchanged when nothing actually differs, so the
        cached `ssl_context` (if any) is reused.
        """
        cert = self.cert if cert is None else cert
        verify = self.verify if verify is None else verify
        if (cert == self.cert) and (verify == self.verify):
            return self
        return SSLConfig(cert=cert, verify=verify)

    async def load_ssl_context(self) -> ssl.SSLContext:
        """
        Lazily build and cache the SSL context for this configuration.
        """
        if not hasattr(self, "ssl_context"):
            if not self.verify:
                self.ssl_context = self.load_ssl_context_no_verify()
            else:
                # Run the SSL loading in a threadpool, since it makes disk accesses.
                loop = asyncio.get_event_loop()
                self.ssl_context = await loop.run_in_executor(
                    None, self.load_ssl_context_verify
                )
        return self.ssl_context

    def load_ssl_context_no_verify(self) -> ssl.SSLContext:
        """
        Return an SSL context for unverified connections.
        """
        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        context.options |= ssl.OP_NO_SSLv2
        context.options |= ssl.OP_NO_SSLv3
        context.options |= ssl.OP_NO_COMPRESSION
        context.set_default_verify_paths()
        return context

    def load_ssl_context_verify(self) -> ssl.SSLContext:
        """
        Return an SSL context for verified connections.

        Raises:
            IOError: if `self.verify` is a path that does not exist.
        """
        if isinstance(self.verify, bool):
            ca_bundle_path = DEFAULT_CA_BUNDLE_PATH
        elif os.path.exists(self.verify):
            ca_bundle_path = self.verify
        else:
            raise IOError(
                "Could not find a suitable TLS CA certificate bundle, "
                "invalid path: {}".format(self.verify)
            )

        # BUGFIX: this context is used by an HTTP *client* to authenticate
        # servers, so the correct purpose is SERVER_AUTH.  The previous
        # `Purpose.CLIENT_AUTH` value configured the context as if *we* were
        # a server validating client certificates, which skipped hostname
        # checking and loaded the wrong default certificate purposes.
        context = ssl.create_default_context(purpose=ssl.Purpose.SERVER_AUTH)
        context.verify_mode = ssl.CERT_REQUIRED
        context.options |= ssl.OP_NO_SSLv2
        context.options |= ssl.OP_NO_SSLv3
        context.options |= ssl.OP_NO_COMPRESSION
        context.set_ciphers(DEFAULT_CIPHERS)

        if ssl.HAS_ALPN:
            context.set_alpn_protocols(["h2", "http/1.1"])
        if ssl.HAS_NPN:
            context.set_npn_protocols(["h2", "http/1.1"])

        # `verify` may point at either a bundle file or a directory of certs.
        if os.path.isfile(ca_bundle_path):
            context.load_verify_locations(cafile=ca_bundle_path)
        elif os.path.isdir(ca_bundle_path):
            context.load_verify_locations(capath=ca_bundle_path)

        if self.cert is not None:
            if isinstance(self.cert, str):
                context.load_cert_chain(certfile=self.cert)
            else:
                context.load_cert_chain(certfile=self.cert[0], keyfile=self.cert[1])

        return context


class TimeoutConfig:
    """
    Timeout values.

    May be constructed from a single value (applied to connect/read/write),
    a `(connect, read, write)` tuple, another `TimeoutConfig`, or explicit
    keyword arguments.
    """

    def __init__(
        self,
        timeout: TimeoutTypes = None,
        *,
        connect_timeout: float = None,
        read_timeout: float = None,
        write_timeout: float = None,
    ):
        if timeout is None:
            self.connect_timeout = connect_timeout
            self.read_timeout = read_timeout
            self.write_timeout = write_timeout
        else:
            # Specified as a single timeout value, which excludes the
            # keyword-only forms.
            assert connect_timeout is None
            assert read_timeout is None
            assert write_timeout is None
            if isinstance(timeout, TimeoutConfig):
                self.connect_timeout = timeout.connect_timeout
                self.read_timeout = timeout.read_timeout
                self.write_timeout = timeout.write_timeout
            elif isinstance(timeout, tuple):
                self.connect_timeout = timeout[0]
                self.read_timeout = timeout[1]
                self.write_timeout = timeout[2]
            else:
                self.connect_timeout = timeout
                self.read_timeout = timeout
                self.write_timeout = timeout

    def __eq__(self, other: typing.Any) -> bool:
        return (
            isinstance(other, self.__class__)
            and self.connect_timeout == other.connect_timeout
            and self.read_timeout == other.read_timeout
            and self.write_timeout == other.write_timeout
        )

    def __repr__(self) -> str:
        class_name = self.__class__.__name__
        # Compact form when all three timeouts are identical.
        if len(set([self.connect_timeout, self.read_timeout, self.write_timeout])) == 1:
            return f"{class_name}(timeout={self.connect_timeout})"
        return f"{class_name}(connect_timeout={self.connect_timeout}, read_timeout={self.read_timeout}, write_timeout={self.write_timeout})"


class PoolLimits:
    """
    Limits on the number of connections in a connection pool.
    """

    def __init__(
        self,
        *,
        soft_limit: int = None,
        hard_limit: int = None,
        pool_timeout: float = None,
    ):
        self.soft_limit = soft_limit
        self.hard_limit = hard_limit
        self.pool_timeout = pool_timeout

    def __eq__(self, other: typing.Any) -> bool:
        return (
            isinstance(other, self.__class__)
            and self.soft_limit == other.soft_limit
            and self.hard_limit == other.hard_limit
            and self.pool_timeout == other.pool_timeout
        )

    def __repr__(self) -> str:
        class_name = self.__class__.__name__
        return f"{class_name}(soft_limit={self.soft_limit}, hard_limit={self.hard_limit}, pool_timeout={self.pool_timeout})"


DEFAULT_SSL_CONFIG = SSLConfig(cert=None, verify=True)
DEFAULT_TIMEOUT_CONFIG = TimeoutConfig(timeout=5.0)
DEFAULT_POOL_LIMITS = PoolLimits(soft_limit=10, hard_limit=100, pool_timeout=5.0)
DEFAULT_CA_BUNDLE_PATH = certifi.where()
DEFAULT_MAX_REDIRECTS = 20
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/vendor/requests_async/http3/config.py
0.705582
0.153074
config.py
pypi
import typing
import zlib

from .exceptions import DecodingError

try:
    import brotli
except ImportError:  # pragma: nocover
    brotli = None


class Decoder:
    """
    Interface for incremental content decoders: feed chunks through
    `decode()`, then call `flush()` once at end-of-body.
    """

    def decode(self, data: bytes) -> bytes:
        raise NotImplementedError()  # pragma: nocover

    def flush(self) -> bytes:
        raise NotImplementedError()  # pragma: nocover


class IdentityDecoder(Decoder):
    """
    Handle unencoded data.
    """

    def decode(self, data: bytes) -> bytes:
        return data

    def flush(self) -> bytes:
        return b""


class DeflateDecoder(Decoder):
    """
    Handle 'deflate' decoding.

    See: https://stackoverflow.com/questions/1838699
    """

    def __init__(self) -> None:
        # Negative wbits selects a raw deflate stream (no zlib header).
        self.decompressor = zlib.decompressobj(-zlib.MAX_WBITS)

    def decode(self, data: bytes) -> bytes:
        try:
            return self.decompressor.decompress(data)
        except zlib.error as exc:
            raise DecodingError from exc

    def flush(self) -> bytes:
        try:
            return self.decompressor.flush()
        except zlib.error as exc:  # pragma: nocover
            raise DecodingError from exc


class GZipDecoder(Decoder):
    """
    Handle 'gzip' decoding.

    See: https://stackoverflow.com/questions/1838699
    """

    def __init__(self) -> None:
        # wbits | 16 tells zlib to expect a gzip header and trailer.
        self.decompressor = zlib.decompressobj(zlib.MAX_WBITS | 16)

    def decode(self, data: bytes) -> bytes:
        try:
            return self.decompressor.decompress(data)
        except zlib.error as exc:
            raise DecodingError from exc

    def flush(self) -> bytes:
        try:
            return self.decompressor.flush()
        except zlib.error as exc:  # pragma: nocover
            raise DecodingError from exc


class BrotliDecoder(Decoder):
    """
    Handle 'brotli' decoding.

    Requires `pip install brotlipy`. See: https://brotlipy.readthedocs.io/
    """

    def __init__(self) -> None:
        assert (
            brotli is not None
        ), "The 'brotlipy' library must be installed to use 'BrotliDecoder'"
        self.decompressor = brotli.Decompressor()

    def decode(self, data: bytes) -> bytes:
        try:
            return self.decompressor.decompress(data)
        except brotli.Error as exc:
            raise DecodingError from exc

    def flush(self) -> bytes:
        try:
            # Validates that the stream terminated correctly; brotli emits
            # all output eagerly from decompress(), so nothing to return.
            self.decompressor.finish()
            return b""
        except brotli.Error as exc:  # pragma: nocover
            raise DecodingError from exc


class MultiDecoder(Decoder):
    """
    Handle the case where multiple encodings have been applied.
    """

    def __init__(self, children: typing.Sequence[Decoder]) -> None:
        """
        'children' should be a sequence of decoders in the order in which
        each was applied.
        """
        # Note that we reverse the order for decoding.
        self.children = list(reversed(children))

    def decode(self, data: bytes) -> bytes:
        for child in self.children:
            data = child.decode(data)
        return data

    def flush(self) -> bytes:
        data = b""
        for child in self.children:
            # Each child's flushed tail must itself be decoded by the
            # remaining (inner-applied) decoders downstream.
            data = child.decode(data) + child.flush()
        return data


SUPPORTED_DECODERS = {
    "identity": IdentityDecoder,
    "gzip": GZipDecoder,
    "deflate": DeflateDecoder,
    "br": BrotliDecoder,
}

if brotli is None:
    SUPPORTED_DECODERS.pop("br")  # pragma: nocover

# Advertised in the Accept-Encoding request header; 'identity' is implicit.
ACCEPT_ENCODING = ", ".join(
    [key for key in SUPPORTED_DECODERS.keys() if key != "identity"]
)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/vendor/requests_async/http3/decoders.py
0.85246
0.483953
decoders.py
pypi
# Timeout exceptions...


class Timeout(Exception):
    """Common base class for every timeout raised by this package."""


class ConnectTimeout(Timeout):
    """The connection could not be established within the allotted time."""


class ReadTimeout(Timeout):
    """Reading response data took longer than allowed."""


class WriteTimeout(Timeout):
    """Writing request data took longer than allowed."""


class PoolTimeout(Timeout):
    """Waiting for a free connection from the pool took too long."""


# HTTP exceptions...


class NotConnected(Exception):
    """
    The connection dropped right as a request was starting, before any
    bytes were successfully written.
    """


class HttpError(Exception):
    """An HTTP error occurred."""


class ProtocolError(Exception):
    """The peer sent malformed HTTP."""


class DecodingError(Exception):
    """The response body could not be decoded."""


# Redirect exceptions...


class RedirectError(Exception):
    """Common base class for redirect-handling errors."""


class TooManyRedirects(RedirectError):
    """The redirect limit was exceeded."""


class RedirectBodyUnavailable(RedirectError):
    """
    A redirect was received, but the request body was streamed and cannot
    be replayed for the new location.
    """


class RedirectLoop(RedirectError):
    """The redirect chain revisited a URL, forming an infinite loop."""


# Stream exceptions...


class StreamException(Exception):
    """
    Common base class for stream-access errors: the developer used the
    request/response stream in an invalid way.
    """


class StreamConsumed(StreamException):
    """
    The response content was requested, but it has already been streamed.
    """


class ResponseNotRead(StreamException):
    """
    The response content was accessed without first calling `read()` on a
    streaming response.
    """


class ResponseClosed(StreamException):
    """
    The response content was requested after the request had been closed.
    """


# Other cases...


class InvalidURL(Exception):
    """The URL lacked a hostname, or its scheme was not HTTP/HTTPS."""


class CookieConflict(Exception):
    """A cookie lookup by name matched more than one cookie."""
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/vendor/requests_async/http3/exceptions.py
0.793986
0.299064
exceptions.py
pypi
from . import _headers
from ._util import bytesify, LocalProtocolError

# Everything in __all__ gets re-exported as part of the h11 public API.
__all__ = [
    "Request",
    "InformationalResponse",
    "Response",
    "Data",
    "EndOfMessage",
    "ConnectionClosed",
]


class _EventBundle(object):
    # Shared machinery for all event types: keyword-only construction with
    # `_fields` as the allowed set and `_defaults` filling optional ones.
    _fields = []
    _defaults = {}

    def __init__(self, **kwargs):
        # `_parsed=True` means the values came from our own parser and are
        # already validated/normalized bytes, so re-validation is skipped.
        _parsed = kwargs.pop("_parsed", False)
        allowed = set(self._fields)
        for kwarg in kwargs:
            if kwarg not in allowed:
                raise TypeError(
                    "unrecognized kwarg {} for {}"
                    .format(kwarg, self.__class__.__name__))
        required = allowed.difference(self._defaults)
        for field in required:
            if field not in kwargs:
                raise TypeError(
                    "missing required kwarg {} for {}"
                    .format(field, self.__class__.__name__))
        self.__dict__.update(self._defaults)
        self.__dict__.update(kwargs)

        # Special handling for some fields

        if "headers" in self.__dict__:
            self.headers = _headers.normalize_and_validate(
                self.headers, _parsed=_parsed)

        if not _parsed:
            # Coerce user-supplied str/bytes-like values to plain bytes.
            for field in ["method", "target", "http_version", "reason"]:
                if field in self.__dict__:
                    self.__dict__[field] = bytesify(self.__dict__[field])

            if "status_code" in self.__dict__:
                if not isinstance(self.status_code, int):
                    raise LocalProtocolError("status code must be integer")

        self._validate()

    def _validate(self):
        # Subclass hook for event-specific invariants; default is no-op.
        pass

    def __repr__(self):
        name = self.__class__.__name__
        kwarg_strs = ["{}={}".format(field, self.__dict__[field])
                      for field in self._fields]
        kwarg_str = ", ".join(kwarg_strs)
        return "{}({})".format(name, kwarg_str)

    # Useful for tests
    def __eq__(self, other):
        return (self.__class__ == other.__class__
                and self.__dict__ == other.__dict__)

    def __ne__(self, other):
        return not self.__eq__(other)

    # This is an unhashable type.
    __hash__ = None


class Request(_EventBundle):
    """The beginning of an HTTP request.

    Fields:

    .. attribute:: method

       An HTTP method, e.g. ``b"GET"`` or ``b"POST"``. Always a byte
       string. :term:`Bytes-like objects <bytes-like object>` and native
       strings containing only ascii characters will be automatically
       converted to byte strings.

    .. attribute:: target

       The target of an HTTP request, e.g. ``b"/index.html"``, or one of the
       more exotic formats described in `RFC 7230, section 5.3
       <https://tools.ietf.org/html/rfc7230#section-5.3>`_. Always a byte
       string. :term:`Bytes-like objects <bytes-like object>` and native
       strings containing only ascii characters will be automatically
       converted to byte strings.

    .. attribute:: headers

       Request headers, represented as a list of (name, value) pairs. See
       :ref:`the header normalization rules <headers-format>` for details.

    .. attribute:: http_version

       The HTTP protocol version, represented as a byte string like
       ``b"1.1"``. See :ref:`the HTTP version normalization rules
       <http_version-format>` for details.

    """
    _fields = ["method", "target", "headers", "http_version"]
    _defaults = {"http_version": b"1.1"}

    def _validate(self):
        # "A server MUST respond with a 400 (Bad Request) status code to any
        # HTTP/1.1 request message that lacks a Host header field and to any
        # request message that contains more than one Host header field or a
        # Host header field with an invalid field-value."
        # -- https://tools.ietf.org/html/rfc7230#section-5.4
        host_count = 0
        for name, value in self.headers:
            if name == b"host":
                host_count += 1
        if self.http_version == b"1.1" and host_count == 0:
            raise LocalProtocolError("Missing mandatory Host: header")
        if host_count > 1:
            raise LocalProtocolError("Found multiple Host: headers")


class _ResponseBase(_EventBundle):
    # Shared field layout for informational and final responses.
    _fields = ["status_code", "headers", "http_version", "reason"]
    _defaults = {"http_version": b"1.1", "reason": b""}


class InformationalResponse(_ResponseBase):
    """An HTTP informational response.

    Fields:

    .. attribute:: status_code

       The status code of this response, as an integer. For an
       :class:`InformationalResponse`, this is always in the range [100,
       200).

    .. attribute:: headers

       Request headers, represented as a list of (name, value) pairs. See
       :ref:`the header normalization rules <headers-format>` for
       details.

    .. attribute:: http_version

       The HTTP protocol version, represented as a byte string like
       ``b"1.1"``. See :ref:`the HTTP version normalization rules
       <http_version-format>` for details.

    .. attribute:: reason

       The reason phrase of this response, as a byte string. For example:
       ``b"OK"``, or ``b"Not Found"``.

    """
    def _validate(self):
        if not (100 <= self.status_code < 200):
            raise LocalProtocolError(
                "InformationalResponse status_code should be in range "
                "[100, 200), not {}"
                .format(self.status_code))


class Response(_ResponseBase):
    """The beginning of an HTTP response.

    Fields:

    .. attribute:: status_code

       The status code of this response, as an integer. For an
       :class:`Response`, this is always in the range [200,
       600).

    .. attribute:: headers

       Request headers, represented as a list of (name, value) pairs. See
       :ref:`the header normalization rules <headers-format>` for details.

    .. attribute:: http_version

       The HTTP protocol version, represented as a byte string like
       ``b"1.1"``. See :ref:`the HTTP version normalization rules
       <http_version-format>` for details.

    .. attribute:: reason

       The reason phrase of this response, as a byte string. For example:
       ``b"OK"``, or ``b"Not Found"``.

    """
    def _validate(self):
        if not (200 <= self.status_code < 600):
            raise LocalProtocolError(
                "Response status_code should be in range [200, 600), not {}"
                .format(self.status_code))


class Data(_EventBundle):
    """Part of an HTTP message body.

    Fields:

    .. attribute:: data

       A :term:`bytes-like object` containing part of a message body. Or, if
       using the ``combine=False`` argument to :meth:`Connection.send`, then
       any object that your socket writing code knows what to do with, and for
       which calling :func:`len` returns the number of bytes that will be
       written -- see :ref:`sendfile` for details.

    .. attribute:: chunk_start

       A marker that indicates whether this data object is from the start of a
       chunked transfer encoding chunk. This field is ignored when a Data
       event is provided to :meth:`Connection.send`: it is only valid on
       events emitted from :meth:`Connection.next_event`. You probably
       shouldn't use this attribute at all; see
       :ref:`chunk-delimiters-are-bad` for details.

    .. attribute:: chunk_end

       A marker that indicates whether this data object is the last for a
       given chunked transfer encoding chunk. This field is ignored when a
       Data event is provided to :meth:`Connection.send`: it is only valid on
       events emitted from :meth:`Connection.next_event`. You probably
       shouldn't use this attribute at all; see
       :ref:`chunk-delimiters-are-bad` for details.

    """
    _fields = ["data", "chunk_start", "chunk_end"]
    _defaults = {"chunk_start": False, "chunk_end": False}


# XX FIXME: "A recipient MUST ignore (or consider as an error) any fields that
# are forbidden to be sent in a trailer, since processing them as if they were
# present in the header section might bypass external security filters."
# https://svn.tools.ietf.org/svn/wg/httpbis/specs/rfc7230.html#chunked.trailer.part
# Unfortunately, the list of forbidden fields is long and vague :-/
class EndOfMessage(_EventBundle):
    """The end of an HTTP message.

    Fields:

    .. attribute:: headers

       Default value: ``[]``

       Any trailing headers attached to this message, represented as a list of
       (name, value) pairs. See :ref:`the header normalization rules
       <headers-format>` for details.

       Must be empty unless ``Transfer-Encoding: chunked`` is in use.

    """
    _fields = ["headers"]
    _defaults = {"headers": []}


class ConnectionClosed(_EventBundle):
    """This event indicates that the sender has closed their outgoing
    connection.

    Note that this does not necessarily mean that they can't *receive* further
    data, because TCP connections are composed of two one-way channels which
    can be closed independently. See :ref:`closing` for details.

    No fields.
    """
    pass
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/vendor/requests_async/http3/h11/_events.py
0.780244
0.191781
_events.py
pypi
import re

from ._util import LocalProtocolError, RemoteProtocolError, validate
from ._state import *
from ._events import *
from ._abnf import header_field, request_line, status_line, chunk_header

__all__ = ["READERS"]

header_field_re = re.compile(header_field.encode("ascii"))

# Remember that this has to run in O(n) time -- so e.g. the bytearray cast is
# critical.
obs_fold_re = re.compile(br"[ \t]+")


def _obsolete_line_fold(lines):
    # Join obs-fold continuation lines (lines starting with SP/HTAB) onto
    # the preceding header line, yielding logical header lines.
    it = iter(lines)
    last = None
    for line in it:
        match = obs_fold_re.match(line)
        if match:
            if last is None:
                raise LocalProtocolError(
                    "continuation line at start of headers")
            if not isinstance(last, bytearray):
                last = bytearray(last)
            last += b" "
            last += line[match.end():]
        else:
            if last is not None:
                yield last
            last = line
    if last is not None:
        yield last


def _decode_header_lines(lines):
    # Parse raw header lines into (field_name, field_value) byte pairs.
    for line in _obsolete_line_fold(lines):
        # _obsolete_line_fold yields either bytearray or bytes objects. On
        # Python 3, validate() takes either and returns matches as bytes. But
        # on Python 2, validate can return matches as bytearrays, so we have
        # to explicitly cast back.
        matches = validate(header_field_re, bytes(line))
        yield (matches["field_name"], matches["field_value"])


request_line_re = re.compile(request_line.encode("ascii"))


def maybe_read_from_IDLE_client(buf):
    # Try to parse a complete request head; returns None if more data is
    # needed.
    lines = buf.maybe_extract_lines()
    if lines is None:
        return None
    if not lines:
        raise LocalProtocolError("no request line received")
    matches = validate(request_line_re, lines[0])
    return Request(headers=list(_decode_header_lines(lines[1:])),
                   _parsed=True,
                   **matches)


status_line_re = re.compile(status_line.encode("ascii"))


def maybe_read_from_SEND_RESPONSE_server(buf):
    # Try to parse a complete response head; returns None if more data is
    # needed.
    lines = buf.maybe_extract_lines()
    if lines is None:
        return None
    if not lines:
        raise LocalProtocolError("no response line received")
    matches = validate(status_line_re, lines[0])
    # Tolerate missing reason phrases
    if matches["reason"] is None:
        matches["reason"] = b""
    status_code = matches["status_code"] = int(matches["status_code"])
    class_ = InformationalResponse if status_code < 200 else Response
    return class_(headers=list(_decode_header_lines(lines[1:])),
                  _parsed=True,
                  **matches)


class ContentLengthReader:
    # Body reader for messages framed by a Content-Length header.
    def __init__(self, length):
        self._length = length
        self._remaining = length

    def __call__(self, buf):
        if self._remaining == 0:
            return EndOfMessage()
        data = buf.maybe_extract_at_most(self._remaining)
        if data is None:
            return None
        self._remaining -= len(data)
        return Data(data=data)

    def read_eof(self):
        # EOF before the declared length is a framing error.
        raise RemoteProtocolError(
            "peer closed connection without sending complete message body "
            "(received {} bytes, expected {})"
            .format(self._length - self._remaining, self._length))


chunk_header_re = re.compile(chunk_header.encode("ascii"))


class ChunkedReader(object):
    # Body reader for Transfer-Encoding: chunked messages.
    def __init__(self):
        self._bytes_in_chunk = 0
        # After reading a chunk, we have to throw away the trailing \r\n; if
        # this is >0 then we discard that many bytes before resuming regular
        # de-chunkification.
        self._bytes_to_discard = 0
        self._reading_trailer = False

    def __call__(self, buf):
        if self._reading_trailer:
            # After the zero-size chunk: read the (possibly empty) trailer
            # headers, which terminate the message.
            lines = buf.maybe_extract_lines()
            if lines is None:
                return None
            return EndOfMessage(headers=list(_decode_header_lines(lines)))
        if self._bytes_to_discard > 0:
            data = buf.maybe_extract_at_most(self._bytes_to_discard)
            if data is None:
                return None
            self._bytes_to_discard -= len(data)
            if self._bytes_to_discard > 0:
                return None
            # else, fall through and read some more
        assert self._bytes_to_discard == 0
        if self._bytes_in_chunk == 0:
            # We need to refill our chunk count
            chunk_header = buf.maybe_extract_until_next(b"\r\n")
            if chunk_header is None:
                return None
            matches = validate(chunk_header_re, chunk_header)
            # XX FIXME: we discard chunk extensions. Does anyone care?
            # We convert to bytes because Python 2's `int()` function doesn't
            # work properly on bytearray objects.
            self._bytes_in_chunk = int(bytes(matches["chunk_size"]),
                                       base=16)
            if self._bytes_in_chunk == 0:
                self._reading_trailer = True
                return self(buf)
            chunk_start = True
        else:
            chunk_start = False
        assert self._bytes_in_chunk > 0
        data = buf.maybe_extract_at_most(self._bytes_in_chunk)
        if data is None:
            return None
        self._bytes_in_chunk -= len(data)
        if self._bytes_in_chunk == 0:
            # Schedule the trailing CRLF of this chunk for discarding.
            self._bytes_to_discard = 2
            chunk_end = True
        else:
            chunk_end = False
        return Data(data=data, chunk_start=chunk_start, chunk_end=chunk_end)

    def read_eof(self):
        raise RemoteProtocolError(
            "peer closed connection without sending complete message body "
            "(incomplete chunked read)")


class Http10Reader(object):
    # Body reader for HTTP/1.0-style bodies framed by connection close.
    def __call__(self, buf):
        data = buf.maybe_extract_at_most(999999999)
        if data is None:
            return None
        return Data(data=data)

    def read_eof(self):
        # Connection close is the legitimate end-of-body signal here.
        return EndOfMessage()


def expect_nothing(buf):
    # Reader for states in which the peer must not send anything.
    if buf:
        raise LocalProtocolError("Got data when expecting EOF")
    return None


# Dispatch table: (role, state) -> head reader, plus SEND_BODY framing
# readers keyed by framing mechanism.
READERS = {
    (CLIENT, IDLE): maybe_read_from_IDLE_client,
    (SERVER, IDLE): maybe_read_from_SEND_RESPONSE_server,
    (SERVER, SEND_RESPONSE): maybe_read_from_SEND_RESPONSE_server,

    (CLIENT, DONE): expect_nothing,
    (CLIENT, MUST_CLOSE): expect_nothing,
    (CLIENT, CLOSED): expect_nothing,
    (SERVER, DONE): expect_nothing,
    (SERVER, MUST_CLOSE): expect_nothing,
    (SERVER, CLOSED): expect_nothing,

    SEND_BODY: {
        "chunked": ChunkedReader,
        "content-length": ContentLengthReader,
        "http/1.0": Http10Reader,
    },
}
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/vendor/requests_async/http3/h11/_readers.py
0.50415
0.180323
_readers.py
pypi
import io
import typing

from ..config import CertTypes, TimeoutTypes, VerifyTypes
from ..interfaces import Dispatcher
from ..models import Request, Response


class WSGIDispatch(Dispatcher):
    """
    A custom dispatcher that handles sending requests directly to a WSGI app.

    The simplest way to use this functionality is to use the `app` argument.
    This will automatically infer if 'app' is a WSGI or an ASGI application,
    and will setup an appropriate dispatch class:

    ```
    client = http3.Client(app=app)
    ```

    Alternatively, you can setup the dispatch instance explicitly.
    This allows you to include any additional configuration arguments specific
    to the WSGIDispatch class:

    ```
    dispatch = http3.WSGIDispatch(
        app=app,
        script_name="/submount",
        remote_addr="1.2.3.4"
    )
    client = http3.Client(dispatch=dispatch)
    ```
    """

    def __init__(
        self,
        app: typing.Callable,
        raise_app_exceptions: bool = True,
        script_name: str = "",
        remote_addr: str = "127.0.0.1",
    ) -> None:
        self.app = app
        self.raise_app_exceptions = raise_app_exceptions
        self.script_name = script_name
        self.remote_addr = remote_addr

    def send(
        self,
        request: Request,
        verify: VerifyTypes = None,
        cert: CertTypes = None,
        timeout: TimeoutTypes = None,
    ) -> Response:
        """Invoke the WSGI app for *request* and build a Response from its output.

        `verify`, `cert` and `timeout` are accepted only for interface
        compatibility with network dispatchers; no network I/O happens here.
        """
        environ = {
            "wsgi.version": (1, 0),
            "wsgi.url_scheme": request.url.scheme,
            "wsgi.input": BodyStream(request.stream()),
            # PEP 3333 requires ``wsgi.errors`` to be a *text* stream;
            # a bytes buffer would raise when the app writes str messages.
            "wsgi.errors": io.StringIO(),
            "wsgi.multithread": True,
            "wsgi.multiprocess": False,
            "wsgi.run_once": False,
            "REQUEST_METHOD": request.method,
            "SCRIPT_NAME": self.script_name,
            "PATH_INFO": request.url.path,
            "QUERY_STRING": request.url.query,
            "SERVER_NAME": request.url.host,
            "SERVER_PORT": str(request.url.port),
            "REMOTE_ADDR": self.remote_addr,
        }
        # Map request headers onto CGI-style environ keys; per the WSGI spec
        # CONTENT_TYPE and CONTENT_LENGTH are NOT prefixed with "HTTP_".
        for key, value in request.headers.items():
            key = key.upper().replace("-", "_")
            if key not in ("CONTENT_TYPE", "CONTENT_LENGTH"):
                key = "HTTP_" + key
            environ[key] = value

        # Captured from the app's single call to start_response().
        seen_status = None
        seen_response_headers = None
        seen_exc_info = None

        def start_response(
            status: str, response_headers: list, exc_info: typing.Any = None
        ) -> None:
            nonlocal seen_status, seen_response_headers, seen_exc_info
            seen_status = status
            seen_response_headers = response_headers
            seen_exc_info = exc_info

        result = self.app(environ, start_response)
        assert seen_status is not None
        assert seen_response_headers is not None
        if seen_exc_info and self.raise_app_exceptions:
            # Re-raise the exception the app reported through start_response.
            raise seen_exc_info[1]

        return Response(
            status_code=int(seen_status.split()[0]),
            protocol="HTTP/1.1",
            headers=seen_response_headers,
            content=(chunk for chunk in result),
            on_close=getattr(result, "close", None),
        )


class BodyStream(io.RawIOBase):
    """Adapt an iterator of bytes chunks into a read-only file-like object,
    suitable for use as the ``wsgi.input`` stream."""

    def __init__(self, iterator: typing.Iterator[bytes]) -> None:
        self._iterator = iterator
        self._buffer = b""          # bytes pulled from the iterator but not yet returned
        self._closed = False        # set once the iterator is exhausted

    def read(self, size: int = -1) -> bytes:
        """Return up to *size* bytes; ``size == -1`` reads everything."""
        if self._closed:
            return b""
        if size == -1:
            return self.readall()
        try:
            # Pull chunks until we have at least `size` bytes buffered.
            while len(self._buffer) < size:
                self._buffer += next(self._iterator)
        except StopIteration:
            # Iterator exhausted: return whatever remains (may be < size).
            self._closed = True
            return self._buffer
        output = self._buffer[:size]
        self._buffer = self._buffer[size:]
        return output

    def readall(self) -> bytes:
        """Drain the iterator and return the buffered remainder in one go."""
        if self._closed:
            raise OSError("Stream closed")  # pragma: nocover
        for chunk in self._iterator:
            self._buffer += chunk
        self._closed = True
        return self._buffer

    def readinto(self, b: bytearray) -> typing.Optional[int]:  # pragma: nocover
        output = self.read(len(b))
        count = len(output)
        b[:count] = output
        return count

    def write(self, b: bytes) -> int:
        raise OSError("Operation not supported")  # pragma: nocover

    def fileno(self) -> int:
        raise OSError("Operation not supported")  # pragma: nocover

    def seek(self, offset: int, whence: int = 0) -> int:
        raise OSError("Operation not supported")  # pragma: nocover

    def truncate(self, size: typing.Optional[int] = None) -> int:
        raise OSError("Operation not supported")  # pragma: nocover
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/vendor/requests_async/http3/dispatch/wsgi.py
0.702122
0.515376
wsgi.py
pypi
from ..config import CertTypes, TimeoutTypes, VerifyTypes
from ..interfaces import AsyncDispatcher, ConcurrencyBackend, Dispatcher
from ..models import (
    AsyncRequest,
    AsyncRequestData,
    AsyncResponse,
    AsyncResponseContent,
    Request,
    RequestData,
    Response,
    ResponseContent,
)


class ThreadedDispatcher(AsyncDispatcher):
    """
    The ThreadedDispatcher class is used to mediate between the Client
    (which always uses async under the hood), and a synchronous `Dispatch`
    class: every call into the sync dispatcher is executed on a threadpool
    so that it never blocks the event loop.
    """

    def __init__(self, dispatch: Dispatcher, backend: ConcurrencyBackend) -> None:
        # The wrapped synchronous dispatcher and the concurrency backend
        # used to run its blocking calls off the event loop.
        self.sync_dispatcher = dispatch
        self.backend = backend

    async def send(
        self,
        request: AsyncRequest,
        verify: VerifyTypes = None,
        cert: CertTypes = None,
        timeout: TimeoutTypes = None,
    ) -> AsyncResponse:
        """Translate *request* to its sync form, dispatch it on a threadpool,
        and translate the sync response back to an AsyncResponse.
        """
        concurrency_backend = self.backend

        # Request body: either eager bytes (`content`) or an async iterator
        # (`content_aiter`) — NOTE(review): presumably exactly one of the two
        # attributes exists on AsyncRequest; confirm against the models module.
        data = getattr(request, "content", getattr(request, "content_aiter", None))
        sync_data = self._sync_request_data(data)

        sync_request = Request(
            method=request.method,
            url=request.url,
            headers=request.headers,
            data=sync_data,
        )

        func = self.sync_dispatcher.send
        kwargs = {
            "request": sync_request,
            "verify": verify,
            "cert": cert,
            "timeout": timeout,
        }

        # Run the blocking send() without stalling the event loop.
        sync_response = await self.backend.run_in_threadpool(func, **kwargs)
        assert isinstance(sync_response, Response)

        # Response body: either eager bytes (`_raw_content`) or a sync stream
        # (`_raw_stream`), depending on how the sync dispatcher produced it.
        content = getattr(
            sync_response, "_raw_content", getattr(sync_response, "_raw_stream", None)
        )
        async_content = self._async_response_content(content)

        async def async_on_close() -> None:
            # Close the underlying sync response on a threadpool as well.
            nonlocal concurrency_backend, sync_response
            await concurrency_backend.run_in_threadpool(sync_response.close)

        return AsyncResponse(
            status_code=sync_response.status_code,
            protocol=sync_response.protocol,
            headers=sync_response.headers,
            content=async_content,
            on_close=async_on_close,
            request=request,
            history=sync_response.history,
        )

    async def close(self) -> None:
        """
        The `.close()` method runs the `Dispatcher.close()` within a threadpool,
        so as not to block the async event loop.
        """
        func = self.sync_dispatcher.close
        await self.backend.run_in_threadpool(func)

    def _async_response_content(self, content: ResponseContent) -> AsyncResponseContent:
        """Return *content* unchanged if it is eager bytes, otherwise wrap it
        for async consumption."""
        if isinstance(content, bytes):
            return content

        # Coerce the plain (sync) iterator into an async iterator, with each
        # iteration step executed on a threadpool so the event loop is never
        # blocked by the underlying stream reads.
        assert hasattr(content, "__iter__")
        return self.backend.iterate_in_threadpool(content)

    def _sync_request_data(self, data: AsyncRequestData) -> RequestData:
        """Return *data* unchanged if it is eager bytes, otherwise coerce the
        async iterator into a sync iterator the sync dispatcher can consume."""
        if isinstance(data, bytes):
            return data
        return self.backend.iterate(data)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/vendor/requests_async/http3/dispatch/threaded.py
0.803868
0.195076
threaded.py
pypi
__all__ = [
    "get_bond_analytics",
    "get_option_analytics",
    "get_swap_analytics",
    "get_cds_analytics",
    "get_cross_analytics",
    "get_repo_analytics",
    "get_capfloor_analytics",
    "get_swaption_analytics",
    "get_term_deposit_analytics",
    "get_surface",
    "get_curve",
]

from .contracts._financial_contracts import FinancialContracts
from .surface._surfaces_class import Surfaces
from .curve._curves_class import Curves


def _result_to_df(result):
    """Record *result* on ContentFactory and return its dataframe.

    Shared post-processing for every IPA convenience function: the raw
    result is always stored as ``ContentFactory._last_result``; on failure
    (or when no dataframe was produced) ``ContentFactory._last_error_status``
    is set to ``result.status`` and None is returned.

    Parameters
    ----------
    result: object
        a response object exposing ``is_success``, ``status`` and
        ``data.df``.

    Returns
    -------
    Dataframe or None
        Dataframe if successful, None otherwise.
    """
    # Imported lazily to avoid a circular import at module load time.
    from refinitiv.dataplatform.factory.content_factory import ContentFactory

    ContentFactory._last_result = result
    if result.is_success and result.data and result.data.df is not None:
        return result.data.df
    ContentFactory._last_error_status = result.status
    return None


def get_instrument_analytics(
    universe,
    fields=None,
    calculation_params=None,
    outputs=None,
    on_response=None,
    closure=None,
    session=None,
):
    """Request analytics for arbitrary financial contracts.

    Parameters
    ----------
    universe: str, list, object
        contains the list of instruments you want to price.
    fields: list, optional
        contains the list of Analytics that the quantitative analytic
        service will compute.
    calculation_params: object, optional
        contains the properties that may be used to control the calculation.
    outputs: list, optional
        contains the sections that will be returned by the API.
    on_response: callable, optional
        callback invoked when the response arrives.
    closure: object, optional
        opaque value passed through to the request.
    session: object, optional
        session used to send the request; the default session otherwise.

    Returns
    -------
    Dataframe or None
        Dataframe if successful, None otherwise.
    """
    _fin = FinancialContracts(session=session, on_response=on_response)
    result = _fin.get_instrument_analytics(
        universe=universe,
        fields=fields,
        calculation_params=calculation_params,
        outputs=outputs,
        closure=closure,
    )
    return _result_to_df(result)


def get_bond_analytics(
    universe,
    fields=None,
    calculation_params=None,
    outputs=None,
):
    """Request pricing analytics for one or more Bond contracts.

    Parameters
    ----------
    universe: str, list, object
        contains the list of Bonds you want to price.
    fields: list, optional
        contains the list of Analytics that the quantitative analytic
        service will compute.
    calculation_params: object, optional
        contains the properties that may be used to control the calculation.
    outputs: list, optional
        contains the sections that will be returned by the API.

    Returns
    -------
    Dataframe or None
        Dataframe if successful, None otherwise.
    """
    result = FinancialContracts.get_bond_analytics(
        universe=universe,
        fields=fields,
        calculation_params=calculation_params,
        outputs=outputs,
    )
    return _result_to_df(result)


def get_option_analytics(
    universe,
    fields=None,
    calculation_params=None,
    outputs=None,
):
    """Request pricing analytics for one or more Option contracts.

    Parameters and return value follow the same contract as
    :func:`get_bond_analytics`.
    """
    result = FinancialContracts.get_option_analytics(
        universe=universe,
        fields=fields,
        calculation_params=calculation_params,
        outputs=outputs,
    )
    return _result_to_df(result)


def get_swap_analytics(
    universe,
    fields=None,
    calculation_params=None,
    outputs=None,
):
    """Request pricing analytics for one or more Interest Rate Swap contracts.

    Parameters and return value follow the same contract as
    :func:`get_bond_analytics`.
    """
    result = FinancialContracts.get_swap_analytics(
        universe=universe,
        fields=fields,
        calculation_params=calculation_params,
        outputs=outputs,
    )
    return _result_to_df(result)


def get_cds_analytics(
    universe,
    fields=None,
    calculation_params=None,
    outputs=None,
):
    """Request pricing analytics for one or more Credit Default Swap contracts.

    Parameters and return value follow the same contract as
    :func:`get_bond_analytics`.
    """
    result = FinancialContracts.get_cds_analytics(
        universe=universe,
        fields=fields,
        calculation_params=calculation_params,
        outputs=outputs,
    )
    return _result_to_df(result)


def get_cross_analytics(
    universe,
    fields=None,
    calculation_params=None,
    outputs=None,
):
    """Request pricing analytics for one or more FX Cross contracts.

    Parameters and return value follow the same contract as
    :func:`get_bond_analytics`.
    """
    result = FinancialContracts.get_cross_analytics(
        universe=universe,
        fields=fields,
        calculation_params=calculation_params,
        outputs=outputs,
    )
    return _result_to_df(result)


def get_repo_analytics(
    universe,
    fields=None,
    calculation_params=None,
    outputs=None,
):
    """Request pricing analytics for one or more Repo contracts.

    Parameters and return value follow the same contract as
    :func:`get_bond_analytics`.
    """
    result = FinancialContracts.get_repo_analytics(
        universe=universe,
        fields=fields,
        calculation_params=calculation_params,
        outputs=outputs,
    )
    return _result_to_df(result)


def get_capfloor_analytics(
    universe,
    fields=None,
    calculation_params=None,
    outputs=None,
):
    """Request pricing analytics for one or more Cap Floor contracts.

    Parameters and return value follow the same contract as
    :func:`get_bond_analytics`.
    """
    result = FinancialContracts.get_capfloor_analytics(
        universe=universe,
        fields=fields,
        calculation_params=calculation_params,
        outputs=outputs,
    )
    return _result_to_df(result)


def get_swaption_analytics(
    universe,
    fields=None,
    calculation_params=None,
    outputs=None,
):
    """Request pricing analytics for one or more Swaption contracts.

    Parameters and return value follow the same contract as
    :func:`get_bond_analytics`.
    """
    result = FinancialContracts.get_swaption_analytics(
        universe=universe,
        fields=fields,
        calculation_params=calculation_params,
        outputs=outputs,
    )
    return _result_to_df(result)


def get_term_deposit_analytics(
    universe,
    fields=None,
    calculation_params=None,
    outputs=None,
):
    """Request pricing analytics for one or more Term Deposit contracts.

    Parameters and return value follow the same contract as
    :func:`get_bond_analytics`.  The error status recorded on failure is
    ``result.status``, consistent with every other function in this module.
    """
    result = FinancialContracts.get_term_deposit_analytics(
        universe=universe,
        fields=fields,
        calculation_params=calculation_params,
        outputs=outputs,
    )
    return _result_to_df(result)


def get_surface(
    universe,
    outputs=None,
):
    """Compute one or more volatility surfaces.

    The Volatility Surfaces API provides an easy way to compute the
    volatility level for a specific expiry and strike, derive volatility
    slices, and analyze the volatility surface of an asset.  To compute a
    surface, define the underlying instrument; calculation parameters and
    surface layout may be adjusted per definition.

    Parameters
    ----------
    universe: list, object
        contains the list of Surface definitions.
    outputs: list, optional
        these values will be distributed depending on the available input
        data and the type of volatility.

    Returns
    -------
    Dataframe or None
        Dataframe if successful, None otherwise.
    """
    result = Surfaces.get_surface(
        universe=universe,
        outputs=outputs,
    )
    return _result_to_df(result)


def get_curve(
    universe,
    outputs=None,
):
    """Compute one or more curves (e.g. zero-coupon or forward curves).

    Parameters
    ----------
    universe: str, list, object
        contains the list of Curve definitions.
    outputs: list, optional
        contains the sections that will be returned by the API.

    Returns
    -------
    Dataframe or None
        Dataframe if successful, None otherwise.
    """
    result = Curves.get_curve(
        universe=universe,
        outputs=outputs,
    )
    return _result_to_df(result)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/_functions.py
0.885403
0.311217
_functions.py
pypi
__all__ = ["BondRoundingParameters"]

from ..instrument._definition import ObjectDefinition
from ..enum_types.rounding_type import RoundingType
from ..enum_types.rounding import Rounding


class BondRoundingParameters(ObjectDefinition):
    """Rounding settings applied to a bond's computed analytics.

    Each pair of properties controls one output field: a ``*_rounding``
    value (number of digits, as a :class:`Rounding` enum) and a
    ``*_rounding_type`` value (rounding direction, as a
    :class:`RoundingType` enum).  All values are optional; when omitted the
    service falls back to the bond's reference data or applies no rounding.
    Values are stored as enum parameters in the underlying JSON definition
    (camelCase keys) via ObjectDefinition's accessors.
    """

    def __init__(
        self,
        accrued_rounding=None,
        accrued_rounding_type=None,
        price_rounding=None,
        price_rounding_type=None,
        spread_rounding=None,
        spread_rounding_type=None,
        yield_rounding=None,
        yield_rounding_type=None,
    ):
        super().__init__()
        self.accrued_rounding = accrued_rounding
        self.accrued_rounding_type = accrued_rounding_type
        self.price_rounding = price_rounding
        self.price_rounding_type = price_rounding_type
        self.spread_rounding = spread_rounding
        self.spread_rounding_type = spread_rounding_type
        self.yield_rounding = yield_rounding
        self.yield_rounding_type = yield_rounding_type

    @property
    def accrued_rounding(self):
        """
        Number of digits to apply for rounding of the Accrued field.
        Available values are Zero, One, Two,..., Eight, Default, Unrounded.
        Optional. A default value may be defined in bond reference data, in
        that case it is used. If it is not the case no rounding is applied.
        :return: enum Rounding
        """
        return self._get_enum_parameter(Rounding, "accruedRounding")

    @accrued_rounding.setter
    def accrued_rounding(self, value):
        self._set_enum_parameter(Rounding, "accruedRounding", value)

    @property
    def accrued_rounding_type(self):
        """
        Type of rounding for accrued rounding.
        Optional. A default value can be defined in bond reference data.
        Otherwise, default value is Near.
        :return: enum RoundingType
        """
        return self._get_enum_parameter(RoundingType, "accruedRoundingType")

    @accrued_rounding_type.setter
    def accrued_rounding_type(self, value):
        self._set_enum_parameter(RoundingType, "accruedRoundingType", value)

    @property
    def price_rounding(self):
        """
        Number of digits to apply for price rounding.
        Available values are Zero, One, Two,..., Eight, Default, Unrounded.
        Optional. A default value may be defined in bond reference data, in
        that case it is used. If it is not the case no rounding is applied.
        :return: enum Rounding
        """
        return self._get_enum_parameter(Rounding, "priceRounding")

    @price_rounding.setter
    def price_rounding(self, value):
        self._set_enum_parameter(Rounding, "priceRounding", value)

    @property
    def price_rounding_type(self):
        """
        Type of rounding for price rounding.
        Optional. A default value can be defined in bond reference data.
        Otherwise, default value is Near.
        :return: enum RoundingType
        """
        return self._get_enum_parameter(RoundingType, "priceRoundingType")

    @price_rounding_type.setter
    def price_rounding_type(self, value):
        self._set_enum_parameter(RoundingType, "priceRoundingType", value)

    @property
    def spread_rounding(self):
        """
        Number of digits to apply for spread rounding.
        Available values are Zero, One, Two,..., Eight, Default, Unrounded.
        Note that spread rounding is done directly on the base point value.
        Optional. By default, data from the bond structure.
        :return: enum Rounding
        """
        return self._get_enum_parameter(Rounding, "spreadRounding")

    @spread_rounding.setter
    def spread_rounding(self, value):
        self._set_enum_parameter(Rounding, "spreadRounding", value)

    @property
    def spread_rounding_type(self):
        """
        Type of rounding for spread rounding.
        Optional — presumably defaults like the other *_rounding_type
        properties (bond reference data, otherwise Near); TODO confirm
        against the service documentation.
        :return: enum RoundingType
        """
        return self._get_enum_parameter(RoundingType, "spreadRoundingType")

    @spread_rounding_type.setter
    def spread_rounding_type(self, value):
        self._set_enum_parameter(RoundingType, "spreadRoundingType", value)

    @property
    def yield_rounding(self):
        """
        Number of digits to apply for yield rounding.
        Available values are Zero, One, Two,..., Eight, Default, Unrounded.
        Optional. A default value may be defined in bond reference data, in
        that case it is used. If it is not the case no rounding is applied.
        :return: enum Rounding
        """
        return self._get_enum_parameter(Rounding, "yieldRounding")

    @yield_rounding.setter
    def yield_rounding(self, value):
        self._set_enum_parameter(Rounding, "yieldRounding", value)

    @property
    def yield_rounding_type(self):
        """
        Type of rounding for yield rounding.
        Optional. A default value can be defined in bond reference data.
        Otherwise, default value is Near.
        :return: enum RoundingType
        """
        return self._get_enum_parameter(RoundingType, "yieldRoundingType")

    @yield_rounding_type.setter
    def yield_rounding_type(self, value):
        self._set_enum_parameter(RoundingType, "yieldRoundingType", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/models/_bond_rounding_parameters.py
0.904687
0.345188
_bond_rounding_parameters.py
pypi
__all__ = ["ZcCurveMainParameters"] from ..instrument._definition import ObjectDefinition from ..enum_types.interpolation_mode import InterpolationMode from ..enum_types.extrapolation_mode import ExtrapolationMode from ..enum_types.price_side import PriceSide from ..enum_types.interest_calculation_method import InterestCalculationMethod from .step import Step from .turn import Turn from .zc import ZcCurveParameters from .convexity import ConvexityAdjustment class ZcCurveMainParameters(ObjectDefinition): def __init__( self, interest_calculation_method=None, convexity_adjustment=None, extrapolation_mode=None, interpolation_mode=None, pivot_curve_parameters=None, price_side=None, reference_curve_parameters=None, steps=None, turns=None, reference_tenor=None, use_convexity_adjustment=None, use_multi_dimensional_solver=None, use_steps=None, valuation_date=None, ): super().__init__() self.interest_calculation_method = interest_calculation_method self.convexity_adjustment = convexity_adjustment self.extrapolation_mode = extrapolation_mode self.interpolation_mode = interpolation_mode self.pivot_curve_parameters = pivot_curve_parameters self.price_side = price_side self.reference_curve_parameters = reference_curve_parameters self.steps = steps self.turns = turns self.reference_tenor = reference_tenor self.use_convexity_adjustment = use_convexity_adjustment self.use_multi_dimensional_solver = use_multi_dimensional_solver self.use_steps = use_steps self.valuation_date = valuation_date @property def convexity_adjustment(self): """ :return: object ConvexityAdjustment """ return self._get_object_parameter(ConvexityAdjustment, "convexityAdjustment") @convexity_adjustment.setter def convexity_adjustment(self, value): self._set_object_parameter(ConvexityAdjustment, "convexityAdjustment", value) @property def extrapolation_mode(self): """ None: no extrapolation Constant: constant extrapolation Linear: linear extrapolation :return: enum ExtrapolationMode """ return 
self._get_enum_parameter(ExtrapolationMode, "extrapolationMode") @extrapolation_mode.setter def extrapolation_mode(self, value): self._set_enum_parameter(ExtrapolationMode, "extrapolationMode", value) @property def interest_calculation_method(self): """ Day count basis of the calculated zero coupon rates. Default value is: Dcb_Actual_Actual :return: enum InterestCalculationMethod """ return self._get_enum_parameter( InterestCalculationMethod, "interestCalculationMethod" ) @interest_calculation_method.setter def interest_calculation_method(self, value): self._set_enum_parameter( InterestCalculationMethod, "interestCalculationMethod", value ) @property def interpolation_mode(self): """ Interpolation method used in swap zero curve bootstrap. Default value is: CubicSpline CubicDiscount: local cubic interpolation of discount factors CubicRate: local cubic interpolation of rates CubicSpline: a natural cubic spline Linear: linear interpolation Log: log linear interpolation ForwardMonotoneConvex :return: enum InterpolationMode """ return self._get_enum_parameter(InterpolationMode, "interpolationMode") @interpolation_mode.setter def interpolation_mode(self, value): self._set_enum_parameter(InterpolationMode, "interpolationMode", value) @property def pivot_curve_parameters(self): """ :return: object ZcCurveParameters """ return self._get_object_parameter(ZcCurveParameters, "pivotCurveParameters") @pivot_curve_parameters.setter def pivot_curve_parameters(self, value): self._set_object_parameter(ZcCurveParameters, "pivotCurveParameters", value) @property def price_side(self): """ Defines which data is used for the rate surface computation. Default value is: Mid :return: enum PriceSide """ return self._get_enum_parameter(PriceSide, "priceSide") @price_side.setter def price_side(self, value): self._set_enum_parameter(PriceSide, "priceSide", value) @property def reference_curve_parameters(self): """ referenceCurveParameters notes... 
:return: object ZcCurveParameters """ return self._get_object_parameter(ZcCurveParameters, "referenceCurveParameters") @reference_curve_parameters.setter def reference_curve_parameters(self, value): self._set_object_parameter(ZcCurveParameters, "referenceCurveParameters", value) @property def steps(self): """ Use to calculate the swap rate surface discount curve, when OIS is selected as discount curve. The steps can specify overnight index stepped dates or/and rates. :return: list Step """ return self._get_list_parameter(Step, "steps") @steps.setter def steps(self, value): self._set_list_parameter(Step, "steps", value) @property def turns(self): """ Used to include end period rates/turns when calculating swap rate surfaces :return: list Turn """ return self._get_list_parameter(Turn, "turns") @turns.setter def turns(self, value): self._set_list_parameter(Turn, "turns", value) @property def reference_tenor(self): """ Root tenor(s) for the xIbor dependencies :return: str """ return self._get_parameter("referenceTenor") @reference_tenor.setter def reference_tenor(self, value): self._set_parameter("referenceTenor", value) @property def use_convexity_adjustment(self): """ false / true Default value is: true. It indicates if the system needs to retrieve the convexity adjustment :return: bool """ return self._get_parameter("useConvexityAdjustment") @use_convexity_adjustment.setter def use_convexity_adjustment(self, value): self._set_parameter("useConvexityAdjustment", value) @property def use_multi_dimensional_solver(self): """ false / true Default value is: true. Specifies the use of the multi-dimensional solver for yield curve bootstrapping. This solving method is required because the bootstrapping method sometimes creates a ZC curve which does not accurately reprice the input instruments used to build it. 
The multi-dimensional solver is recommended when cubic interpolation methods are used in building the curve (in other cases, performance might be inferior to the regular bootstrapping method). - true: to use multi-dimensional solver for yield curve bootstrapping - false: not to use multi-dimensional solver for yield curve bootstrapping :return: bool """ return self._get_parameter("useMultiDimensionalSolver") @use_multi_dimensional_solver.setter def use_multi_dimensional_solver(self, value): self._set_parameter("useMultiDimensionalSolver", value) @property def use_steps(self): """ false / true Default value is: false. It indicates if the system needs to retrieve the overnight index stepped dates or/and rates :return: bool """ return self._get_parameter("useSteps") @use_steps.setter def use_steps(self, value): self._set_parameter("useSteps", value) @property def valuation_date(self): """ The valuation date Default value is the current date :return: str """ return self._get_parameter("valuationDate") @valuation_date.setter def valuation_date(self, value): self._set_parameter("valuationDate", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/curve/_zc_curve_main_parameters.py
0.868479
0.325655
_zc_curve_main_parameters.py
pypi
__all__ = ["SwapZcCurveParameters"] from ..instrument._definition import ObjectDefinition from ..enum_types.interpolation_mode import InterpolationMode from ..enum_types.extrapolation_mode import ExtrapolationMode from ..enum_types.price_side import PriceSide from ..enum_types.day_count_basis import DayCountBasis from ._step import Step from ._turn import Turn from ._convexity import ConvexityAdjustment class SwapZcCurveParameters(ObjectDefinition): def __init__( self, interest_calculation_method=None, convexity_adjustment=None, extrapolation_mode=None, interpolation_mode=None, price_side=None, steps=None, turns=None, reference_tenor=None, use_convexity_adjustment=None, use_multi_dimensional_solver=None, use_steps=None, valuation_date=None, ): super().__init__() self.interest_calculation_method = interest_calculation_method self.convexity_adjustment = convexity_adjustment self.extrapolation_mode = extrapolation_mode self.interpolation_mode = interpolation_mode self.price_side = price_side self.steps = steps self.turns = turns self.reference_tenor = reference_tenor self.use_convexity_adjustment = use_convexity_adjustment self.use_multi_dimensional_solver = use_multi_dimensional_solver self.use_steps = use_steps self.valuation_date = valuation_date @property def convexity_adjustment(self): """ :return: object ConvexityAdjustment """ return self._get_object_parameter(ConvexityAdjustment, "convexityAdjustment") @convexity_adjustment.setter def convexity_adjustment(self, value): self._set_object_parameter(ConvexityAdjustment, "convexityAdjustment", value) @property def extrapolation_mode(self): """ None: no extrapolation Constant: constant extrapolation Linear: linear extrapolation :return: enum ExtrapolationMode """ return self._get_enum_parameter(ExtrapolationMode, "extrapolationMode") @extrapolation_mode.setter def extrapolation_mode(self, value): self._set_enum_parameter(ExtrapolationMode, "extrapolationMode", value) @property def interest_calculation_method(self): 
""" Day count basis of the calculated zero coupon rates. Default value is: Dcb_Actual_Actual :return: enum InterestCalculationMethod """ return self._get_enum_parameter(DayCountBasis, "interestCalculationMethod") @interest_calculation_method.setter def interest_calculation_method(self, value): self._set_enum_parameter(DayCountBasis, "interestCalculationMethod", value) @property def interpolation_mode(self): """ Interpolation method used in swap zero curve bootstrap. Default value is: CubicSpline CubicDiscount: local cubic interpolation of discount factors CubicRate: local cubic interpolation of rates CubicSpline: a natural cubic spline Linear: linear interpolation Log: log linear interpolation ForwardMonotoneConvex :return: enum InterpolationMode """ return self._get_enum_parameter(InterpolationMode, "interpolationMode") @interpolation_mode.setter def interpolation_mode(self, value): self._set_enum_parameter(InterpolationMode, "interpolationMode", value) @property def price_side(self): """ Defines which data is used for the rate surface computation. Default value is: Mid :return: enum PriceSide """ return self._get_enum_parameter(PriceSide, "priceSide") @price_side.setter def price_side(self, value): self._set_enum_parameter(PriceSide, "priceSide", value) @property def steps(self): """ Use to calculate the swap rate surface discount curve, when OIS is selected as discount curve. The steps can specify overnight index stepped dates or/and rates. 
:return: list Step """ return self._get_list_parameter(Step, "steps") @steps.setter def steps(self, value): self._set_list_parameter(Step, "steps", value) @property def turns(self): """ Used to include end period rates/turns when calculating swap rate surfaces :return: list Turn """ return self._get_list_parameter(Turn, "turns") @turns.setter def turns(self, value): self._set_list_parameter(Turn, "turns", value) @property def reference_tenor(self): """ Root tenor(s) for the xIbor dependencies :return: str """ return self._get_parameter("referenceTenor") @reference_tenor.setter def reference_tenor(self, value): self._set_parameter("referenceTenor", value) @property def use_convexity_adjustment(self): """ false / true Default value is: true. It indicates if the system needs to retrieve the convexity adjustment :return: bool """ return self._get_parameter("useConvexityAdjustment") @use_convexity_adjustment.setter def use_convexity_adjustment(self, value): self._set_parameter("useConvexityAdjustment", value) @property def use_multi_dimensional_solver(self): """ false / true Default value is: true. Specifies the use of the multi-dimensional solver for yield curve bootstrapping. This solving method is required because the bootstrapping method sometimes creates a ZC curve which does not accurately reprice the input instruments used to build it. The multi-dimensional solver is recommended when cubic interpolation methods are used in building the curve (in other cases, performance might be inferior to the regular bootstrapping method). - true: to use multi-dimensional solver for yield curve bootstrapping - false: not to use multi-dimensional solver for yield curve bootstrapping :return: bool """ return self._get_parameter("useMultiDimensionalSolver") @use_multi_dimensional_solver.setter def use_multi_dimensional_solver(self, value): self._set_parameter("useMultiDimensionalSolver", value) @property def use_steps(self): """ false / true Default value is: false. 
It indicates if the system needs to retrieve the overnight index stepped dates or/and rates :return: bool """ return self._get_parameter("useSteps") @use_steps.setter def use_steps(self, value): self._set_parameter("useSteps", value) @property def valuation_date(self): """ The valuation date Default value is the current date :return: str """ return self._get_parameter("valuationDate") @valuation_date.setter def valuation_date(self, value): self._set_parameter("valuationDate", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/curve/_swap_zc_curve_parameters.py
0.909751
0.35248
_swap_zc_curve_parameters.py
pypi
__all__ = ["ZcCurveParameters"] from ..instrument._definition import ObjectDefinition from ..enum_types.interpolation_mode import InterpolationMode from ..enum_types.extrapolation_mode import ExtrapolationMode from ..enum_types.price_side import PriceSide from ..enum_types.interest_calculation_method import InterestCalculationMethod from .step import Step from .turn import Turn from .convexity import ConvexityAdjustment class ZcCurveParameters(ObjectDefinition): def __init__( self, interest_calculation_method=None, convexity_adjustment=None, extrapolation_mode=None, interpolation_mode=None, price_side=None, steps=None, turns=None, reference_tenor=None, use_convexity_adjustment=None, use_multi_dimensional_solver=None, use_steps=None, ): super().__init__() self.interest_calculation_method = interest_calculation_method self.convexity_adjustment = convexity_adjustment self.extrapolation_mode = extrapolation_mode self.interpolation_mode = interpolation_mode self.price_side = price_side self.steps = steps self.turns = turns self.reference_tenor = reference_tenor self.use_convexity_adjustment = use_convexity_adjustment self.use_multi_dimensional_solver = use_multi_dimensional_solver self.use_steps = use_steps @property def convexity_adjustment(self): """ :return: object ConvexityAdjustment """ return self._get_object_parameter(ConvexityAdjustment, "convexityAdjustment") @convexity_adjustment.setter def convexity_adjustment(self, value): self._set_object_parameter(ConvexityAdjustment, "convexityAdjustment", value) @property def extrapolation_mode(self): """ None: no extrapolation Constant: constant extrapolation Linear: linear extrapolation :return: enum ExtrapolationMode """ return self._get_enum_parameter(ExtrapolationMode, "extrapolationMode") @extrapolation_mode.setter def extrapolation_mode(self, value): self._set_enum_parameter(ExtrapolationMode, "extrapolationMode", value) @property def interest_calculation_method(self): """ Day count basis of the calculated zero 
coupon rates. Default value is: Dcb_Actual_Actual :return: enum InterestCalculationMethod """ return self._get_enum_parameter( InterestCalculationMethod, "interestCalculationMethod" ) @interest_calculation_method.setter def interest_calculation_method(self, value): self._set_enum_parameter( InterestCalculationMethod, "interestCalculationMethod", value ) @property def interpolation_mode(self): """ Interpolation method used in swap zero curve bootstrap. Default value is: CubicSpline CubicDiscount: local cubic interpolation of discount factors CubicRate: local cubic interpolation of rates CubicSpline: a natural cubic spline Linear: linear interpolation Log: log linear interpolation ForwardMonotoneConvex :return: enum InterpolationMode """ return self._get_enum_parameter(InterpolationMode, "interpolationMode") @interpolation_mode.setter def interpolation_mode(self, value): self._set_enum_parameter(InterpolationMode, "interpolationMode", value) @property def price_side(self): """ Defines which data is used for the rate surface computation. Default value is: Mid :return: enum PriceSide """ return self._get_enum_parameter(PriceSide, "priceSide") @price_side.setter def price_side(self, value): self._set_enum_parameter(PriceSide, "priceSide", value) @property def steps(self): """ Use to calculate the swap rate surface discount curve, when OIS is selected as discount curve. The steps can specify overnight index stepped dates or/and rates. 
:return: list Step """ return self._get_list_parameter(Step, "steps") @steps.setter def steps(self, value): self._set_list_parameter(Step, "steps", value) @property def turns(self): """ Used to include end period rates/turns when calculating swap rate surfaces :return: list Turn """ return self._get_list_parameter(Turn, "turns") @turns.setter def turns(self, value): self._set_list_parameter(Turn, "turns", value) @property def reference_tenor(self): """ Root tenor(s) for the xIbor dependencies :return: str """ return self._get_parameter("referenceTenor") @reference_tenor.setter def reference_tenor(self, value): self._set_parameter("referenceTenor", value) @property def use_convexity_adjustment(self): """ false / true Default value is: true. It indicates if the system needs to retrieve the convexity adjustment :return: bool """ return self._get_parameter("useConvexityAdjustment") @use_convexity_adjustment.setter def use_convexity_adjustment(self, value): self._set_parameter("useConvexityAdjustment", value) @property def use_multi_dimensional_solver(self): """ false / true Default value is: true. Specifies the use of the multi-dimensional solver for yield curve bootstrapping. This solving method is required because the bootstrapping method sometimes creates a ZC curve which does not accurately reprice the input instruments used to build it. The multi-dimensional solver is recommended when cubic interpolation methods are used in building the curve (in other cases, performance might be inferior to the regular bootstrapping method). - true: to use multi-dimensional solver for yield curve bootstrapping - false: not to use multi-dimensional solver for yield curve bootstrapping :return: bool """ return self._get_parameter("useMultiDimensionalSolver") @use_multi_dimensional_solver.setter def use_multi_dimensional_solver(self, value): self._set_parameter("useMultiDimensionalSolver", value) @property def use_steps(self): """ false / true Default value is: false. 
It indicates if the system needs to retrieve the overnight index stepped dates or/and rates :return: bool """ return self._get_parameter("useSteps") @use_steps.setter def use_steps(self, value): self._set_parameter("useSteps", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/curve/_zc_curve_parameters.py
0.899899
0.371393
_zc_curve_parameters.py
pypi
__all__ = ["ZcCurveMainDefinition"] from ..instrument._definition import ObjectDefinition from ..enum_types.asset_class import AssetClass from ..enum_types.risk_type import RiskType from ._zc_curve_definition import ZcCurveDefinition class ZcCurveMainDefinition(ObjectDefinition): def __init__( self, index_name=None, index_tenors=None, main_constituent_asset_class=None, pivot_curve_definition=None, reference_curve_definition=None, risk_type=None, currency=None, discounting_tenor=None, id=None, name=None, source=None, ): super().__init__() self.index_name = index_name self.index_tenors = index_tenors self.main_constituent_asset_class = main_constituent_asset_class self.pivot_curve_definition = pivot_curve_definition self.reference_curve_definition = reference_curve_definition self.risk_type = risk_type self.currency = currency self.discounting_tenor = discounting_tenor self.id = id self.name = name self.source = source @property def index_tenors(self): """ Defines expected rate surface tenor/slices Defaults to the tenors available, based on provided market data :return: list string """ return self._get_list_parameter(str, "indexTenors") @index_tenors.setter def index_tenors(self, value): self._set_list_parameter(str, "indexTenors", value) @property def main_constituent_asset_class(self): """ :return: enum AssetClass """ return self._get_enum_parameter(AssetClass, "mainConstituentAssetClass") @main_constituent_asset_class.setter def main_constituent_asset_class(self, value): self._set_enum_parameter(AssetClass, "mainConstituentAssetClass", value) @property def pivot_curve_definition(self): """ :return: object ZcCurveDefinition """ return self._get_object_parameter(ZcCurveDefinition, "pivotCurveDefinition") @pivot_curve_definition.setter def pivot_curve_definition(self, value): self._set_object_parameter(ZcCurveDefinition, "pivotCurveDefinition", value) @property def reference_curve_definition(self): """ :return: object ZcCurveDefinition """ return 
self._get_object_parameter(ZcCurveDefinition, "referenceCurveDefinition") @reference_curve_definition.setter def reference_curve_definition(self, value): self._set_object_parameter(ZcCurveDefinition, "referenceCurveDefinition", value) @property def risk_type(self): """ :return: enum RiskType """ return self._get_enum_parameter(RiskType, "riskType") @risk_type.setter def risk_type(self, value): self._set_enum_parameter(RiskType, "riskType", value) @property def currency(self): """ :return: str """ return self._get_parameter("currency") @currency.setter def currency(self, value): self._set_parameter("currency", value) @property def discounting_tenor(self): """ :return: str """ return self._get_parameter("discountingTenor") @discounting_tenor.setter def discounting_tenor(self, value): self._set_parameter("discountingTenor", value) @property def id(self): """ Id of the curve definition to get :return: str """ return self._get_parameter("id") @id.setter def id(self, value): self._set_parameter("id", value) @property def index_name(self): """ :return: str """ return self._get_parameter("indexName") @index_name.setter def index_name(self, value): self._set_parameter("indexName", value) @property def name(self): """ :return: str """ return self._get_parameter("name") @name.setter def name(self, value): self._set_parameter("name", value) @property def source(self): """ :return: str """ return self._get_parameter("source") @source.setter def source(self, value): self._set_parameter("source", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/curve/_zc_curve_main_definition.py
0.870239
0.165897
_zc_curve_main_definition.py
pypi
__all__ = ["CurvePoint"] from ...instrument._definition import ObjectDefinition from ._instrument import Instrument class CurvePoint(ObjectDefinition): def __init__( self, start_date=None, end_date=None, tenor=None, instruments=None, discount_factor=None, rate_percent=None, ): super().__init__() self.start_date = start_date self.end_date = end_date self.tenor = tenor self.instruments = instruments self.discount_factor = discount_factor self.rate_percent = rate_percent @property def instruments(self): """ :return: list Instrument """ return self._get_list_parameter(Instrument, "instruments") @instruments.setter def instruments(self, value): self._set_list_parameter(Instrument, "instruments", value) @property def discount_factor(self): """ :return: float """ return self._get_parameter("discountFactor") @discount_factor.setter def discount_factor(self, value): self._set_parameter("discountFactor", value) @property def end_date(self): """ :return: str """ return self._get_parameter("endDate") @end_date.setter def end_date(self, value): self._set_parameter("endDate", value) @property def rate_percent(self): """ :return: float """ return self._get_parameter("ratePercent") @rate_percent.setter def rate_percent(self, value): self._set_parameter("ratePercent", value) @property def start_date(self): """ :return: str """ return self._get_parameter("startDate") @start_date.setter def start_date(self, value): self._set_parameter("startDate", value) @property def tenor(self): """ :return: str """ return self._get_parameter("tenor") @tenor.setter def tenor(self, value): self._set_parameter("tenor", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/curve/_curve_point.py
0.907147
0.203055
_curve_point.py
pypi
__all__ = ["SwapZcCurveDefinition"] from ..instrument._definition import ObjectDefinition from ..enum_types.asset_class import AssetClass from ..enum_types.risk_type import RiskType class SwapZcCurveDefinition(ObjectDefinition): def __init__( self, index_name=None, index_tenors=None, main_constituent_asset_class=None, risk_type=None, currency=None, discounting_tenor=None, id=None, name=None, source=None, ): super().__init__() self.index_name = index_name self.index_tenors = index_tenors self.main_constituent_asset_class = main_constituent_asset_class self.risk_type = risk_type self.currency = currency self.discounting_tenor = discounting_tenor self.id = id self.name = name self.source = source @property def index_tenors(self): """ Defines expected rate surface tenor/slices Defaults to the tenors available, based on provided market data :return: list string """ return self._get_list_parameter(str, "indexTenors") @index_tenors.setter def index_tenors(self, value): self._set_list_parameter(str, "indexTenors", value) @property def main_constituent_asset_class(self): """ :return: enum AssetClass """ return self._get_enum_parameter(AssetClass, "mainConstituentAssetClass") @main_constituent_asset_class.setter def main_constituent_asset_class(self, value): self._set_enum_parameter(AssetClass, "mainConstituentAssetClass", value) @property def risk_type(self): """ :return: enum RiskType """ return self._get_enum_parameter(RiskType, "riskType") @risk_type.setter def risk_type(self, value): self._set_enum_parameter(RiskType, "riskType", value) @property def currency(self): """ :return: str """ return self._get_parameter("currency") @currency.setter def currency(self, value): self._set_parameter("currency", value) @property def discounting_tenor(self): """ :return: str """ return self._get_parameter("discountingTenor") @discounting_tenor.setter def discounting_tenor(self, value): self._set_parameter("discountingTenor", value) @property def id(self): """ Id of the curve 
definition to get :return: str """ return self._get_parameter("id") @id.setter def id(self, value): self._set_parameter("id", value) @property def index_name(self): """ :return: str """ return self._get_parameter("indexName") @index_name.setter def index_name(self, value): self._set_parameter("indexName", value) @property def name(self): """ :return: str """ return self._get_parameter("name") @name.setter def name(self, value): self._set_parameter("name", value) @property def source(self): """ :return: str """ return self._get_parameter("source") @source.setter def source(self, value): self._set_parameter("source", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/curve/_swap_zc_curve_definition.py
0.856422
0.193033
_swap_zc_curve_definition.py
pypi
__all__ = ["ForwardCurveDefinition"] from ..instrument._definition import ObjectDefinition class ForwardCurveDefinition(ObjectDefinition): def __init__( self, index_tenor=None, forward_curve_tenors=None, forward_curve_tag=None, forward_start_date=None, forward_start_tenor=None, ): super().__init__() self.index_tenor = index_tenor self.forward_curve_tenors = forward_curve_tenors self.forward_curve_tag = forward_curve_tag self.forward_start_date = forward_start_date self.forward_start_tenor = forward_start_tenor @property def forward_curve_tenors(self): """ Defines expected forward rate surface tenor/slices :return: list string """ return self._get_list_parameter(str, "forwardCurveTenors") @forward_curve_tenors.setter def forward_curve_tenors(self, value): self._set_list_parameter(str, "forwardCurveTenors", value) @property def forward_curve_tag(self): """ :return: str """ return self._get_parameter("forwardCurveTag") @forward_curve_tag.setter def forward_curve_tag(self, value): self._set_parameter("forwardCurveTag", value) @property def forward_start_date(self): """ Defines the forward start date by date format :return: str """ return self._get_parameter("forwardStartDate") @forward_start_date.setter def forward_start_date(self, value): self._set_parameter("forwardStartDate", value) @property def forward_start_tenor(self): """ Defines the forward start date by tenor format: "Spot" / "1M" / ... :return: str """ return self._get_parameter("forwardStartTenor") @forward_start_tenor.setter def forward_start_tenor(self, value): self._set_parameter("forwardStartTenor", value) @property def index_tenor(self): """ :return: str """ return self._get_parameter("indexTenor") @index_tenor.setter def index_tenor(self, value): self._set_parameter("indexTenor", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/curve/_forward_curve_definition.py
0.93115
0.198433
_forward_curve_definition.py
pypi
__all__ = ["BaseDefinition"]

import abc

import numpy as np
import pandas as pd
from scipy import interpolate

from refinitiv.dataplatform.delivery.data.endpoint import Endpoint

from ._surface_output import SurfaceOutput
from ..enum_types.underlying_type import UnderlyingType
from ..enum_types.axis import Axis
from ..instrument._definition import ObjectDefinition


def parse_axis(universe):
    """Split a raw surface matrix into meshgrid axes.

    The first row of ``universe["surface"]`` holds the strike axis (after a
    leading corner cell); every following row starts with a date followed by
    the surface values for that date.

    :param universe: dict with a "surface" key holding a 2-D list
    :return: tuple (X, Y, Z) where X/Y are meshgrid arrays of strikes/dates
        and Z is the float value grid
    """
    surface = universe.get("surface")
    strike_axis = surface[0][1:]  # drop the corner cell shared by both axes
    surface = surface[1:]
    time_axis = []
    surface_grid = []
    for curve in surface:
        time_axis.append(curve[0])
        surface_grid.append(curve[1:])
    x = np.array(strike_axis, dtype=float)
    y = np.array(time_axis, dtype="datetime64")
    Z = np.array(surface_grid, dtype=float)
    X, Y = np.meshgrid(x, y)
    return X, Y, Z


def interpolate_data(x, y, z, x_value, y_value):
    """Interpolate the surface ``z`` at (x_value, y_value).

    Dates on the y axis are converted to integers so SciPy can interpolate
    over them.

    NOTE(review): ``interpolate.interp2d`` is deprecated since SciPy 1.10;
    consider migrating to ``RegularGridInterpolator`` when upgrading SciPy.

    :param x: meshgrid X (strike) array
    :param y: meshgrid Y (datetime64) array
    :param z: value grid
    :param x_value: scalar or array of strikes to evaluate at
    :param y_value: date (str or datetime64) or array of dates
    :return: interpolated value array
    """
    x_axis = x[0]
    y_axis = y[:, 0]
    if isinstance(y_value, str):
        y_value = np.datetime64(y_value)
    return interpolate.interp2d(x_axis, y_axis.astype(int), z)(
        x_value, y_value.astype(int)
    )


class BaseDefinition(ObjectDefinition, abc.ABC):
    """Base class for volatility surface definitions (ETI, FX, Cap, Swaption)."""

    class Surface(object):
        """Convenience wrapper over the meshgrid axes of a parsed surface."""

        def __init__(self, x, y, z):
            super().__init__()
            self.x = x
            self.y = y
            self.z = z

        def get_axis(self):
            """Return the raw (x, y, z) meshgrid arrays."""
            return self.x, self.y, self.z

        def get_curve(self, value, axis_type):
            """Extract a 1-D curve at ``value`` along the given axis.

            Exact matches slice the grid directly; other values are
            interpolated.

            :param value: strike (Axis.X) or date string (Axis.Y)
            :param axis_type: enum Axis selecting the axis ``value`` refers to
            :return: tuple (axis_values, curve_values) or None
            """
            axis = []
            curve = None
            if axis_type is Axis.X:
                axis = self.x[0]
            elif axis_type is Axis.Y:
                axis = np.array([str(i) for i in self.y[:, 0]])
            elif axis_type is Axis.Z:
                axis = self.z
            if value not in axis:
                # value between grid points: interpolate a synthetic curve
                if axis_type is Axis.X:
                    curve_y = self.y[:, 0]
                    # interp2d returns an (n, 1) grid here; take the column
                    curve_z = interpolate_data(
                        self.x, self.y, self.z, value, curve_y
                    )[:, 0]
                    curve = curve_y, curve_z
                elif axis_type is Axis.Y:
                    curve_x = self.x[0, :]
                    curve_z = interpolate_data(self.x, self.y, self.z, curve_x, value)
                    curve = curve_x, curve_z
            else:
                # exact grid value: slice the stored grid directly
                if axis_type is Axis.X:
                    index = np.where(axis == value)[0][0]
                    curve_y = self.y[:, index]
                    curve_z = self.z[:, index]
                    curve = curve_y, curve_z
                elif axis_type is Axis.Y:
                    index = np.where(axis == value)[0][0]
                    curve_x = self.x[index, :]
                    curve_z = self.z[index, :]
                    curve = curve_x, curve_z
            return curve

        def get_point(self, x, y):
            """Return the surface value at (x, y), interpolating off-grid points.

            :param x: strike value
            :param y: date string
            :return: float surface value
            """
            point = None
            if x in self.x and np.datetime64(y) in self.y:
                index1 = np.where(self.x == x)[1][0]
                index2 = np.where(self.y == np.datetime64(y))[0][0]
                point = self.z[index2][index1]
            else:
                # off-grid point: interpolate
                point = interpolate_data(self.x, self.y, self.z, x, y)[0]
            return point

    class Data(Endpoint.EndpointData):
        """Endpoint payload wrapper exposing the parsed surface and a DataFrame."""

        def __init__(self, raw, surface=None):
            super().__init__(raw)
            self._surface = surface

        @property
        def surface(self):
            """Lazily parse the first universe of the raw payload into a Surface."""
            if self._surface is None and self._raw:
                universe = self._raw.get("data")[0]
                self._surface = BaseDefinition.Surface(*parse_axis(universe))
            return self._surface

        @property
        def df(self):
            """Lazily build a DataFrame from the raw "data" list.

            Empty or missing data yields an empty DataFrame rather than None.
            """
            # A dead `if False:` branch that built a MultiIndex frame per
            # universe was removed here; the live path below is unchanged.
            if self._dataframe is None and self._raw:
                data = self._raw.get("data")
                if data:
                    data_frame = pd.DataFrame(data)
                else:
                    data_frame = pd.DataFrame([])
                if not data_frame.empty:
                    data_frame = data_frame.convert_dtypes()
                self._dataframe = data_frame
            return self._dataframe

    class Response(Endpoint.EndpointResponse):
        """Endpoint response that surfaces per-universe errors as failures."""

        def __init__(self, response):
            super().__init__(response, service_class=BaseDefinition)
            _raw_json = self.data.raw if self.data else None
            # Guard on _raw_json too: a "successful" transport response with
            # no payload must not raise while probing for errors.
            if self.is_success and _raw_json:
                _error = _raw_json["data"][0].get("error", "")
                if _error:
                    self._status = _error["status"]
                    if self._status == "Error":
                        self._is_success = False
                    self._error_code = _error["code"]
                    self._error_message = _error["message"]

    def __init__(
        self,
        underlying_type,
        tag,
        layout,
    ):
        super().__init__()
        self.tag = tag
        self.layout = layout
        self.underlying_type = underlying_type

    @property
    def layout(self):
        """
        The section that contains the properties that define how the
        volatility surface is returned
        :return: object SurfaceOutput
        """
        return self._get_object_parameter(SurfaceOutput, "surfaceLayout")

    @layout.setter
    def layout(self, value):
        self._set_object_parameter(SurfaceOutput, "surfaceLayout", value)

    @property
    def underlying_type(self):
        """
        The type of the underlying used to generate the volatility surface
        :return: enum UnderlyingType
        """
        return self._get_enum_parameter(UnderlyingType, "underlyingType")

    @underlying_type.setter
    def underlying_type(self, value):
        self._set_enum_parameter(UnderlyingType, "underlyingType", value)

    @property
    def tag(self):
        """
        A user defined string to describe the volatility surface
        :return: str
        """
        return self._get_parameter("surfaceTag")

    @tag.setter
    def tag(self, value):
        self._set_parameter("surfaceTag", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/surface/_base_definition.py
0.832509
0.312383
_base_definition.py
pypi
__all__ = ["SurfaceOutput"]

from refinitiv.dataplatform.content.ipa.instrument._definition import ObjectDefinition

from ._volatility_surface_point import VolatilitySurfacePoint
from ..enum_types.format import Format


class SurfaceOutput(ObjectDefinition):
    """Describes how a generated volatility surface is laid out in the response."""

    def __init__(
        self,
        data_points=None,
        format=None,
        x_values=None,
        y_values=None,
        x_point_count=None,
        y_point_count=None,
    ):
        super().__init__()
        self.data_points = data_points
        self.format = format
        self.x_values = x_values
        self.y_values = y_values
        self.x_point_count = x_point_count
        self.y_point_count = y_point_count

    @property
    def data_points(self):
        """
        The list of specific data points to be returned.
        :return: list VolatilitySurfacePoint
        """
        return self._get_list_parameter(VolatilitySurfacePoint, "dataPoints")

    @data_points.setter
    def data_points(self, value):
        self._set_list_parameter(VolatilitySurfacePoint, "dataPoints", value)

    @property
    def format(self):
        """
        Whether the calculated volatilities are returned as a list or a matrix.
        :return: enum Format
        """
        return self._get_enum_parameter(Format, "format")

    @format.setter
    def format(self, value):
        self._set_enum_parameter(Format, "format", value)

    @property
    def x_values(self):
        """
        The list of discrete values requested for the x-axis.
        :return: list string
        """
        return self._get_list_parameter(str, "xValues")

    @x_values.setter
    def x_values(self, value):
        self._set_list_parameter(str, "xValues", value)

    @property
    def y_values(self):
        """
        The list of discrete values requested for the y-axis.
        :return: list string
        """
        return self._get_list_parameter(str, "yValues")

    @y_values.setter
    def y_values(self, value):
        self._set_list_parameter(str, "yValues", value)

    @property
    def x_point_count(self):
        """
        The number of values generated along the x-axis; they will be
        distributed depending on the available input data and the type of
        volatility.
        :return: int
        """
        return self._get_parameter("xPointCount")

    @x_point_count.setter
    def x_point_count(self, value):
        self._set_parameter("xPointCount", value)

    @property
    def y_point_count(self):
        """
        The number of values generated along the y-axis; they will be
        distributed depending on the available input data and the type of
        volatility.
        :return: int
        """
        return self._get_parameter("yPointCount")

    @y_point_count.setter
    def y_point_count(self, value):
        self._set_parameter("yPointCount", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/surface/_surface_output.py
0.909093
0.389866
_surface_output.py
pypi
__all__ = ["Definition"]

from .._base_definition import BaseDefinition
from ._cap_calculation_params import CalculationParams
from ...enum_types.discounting_type import DiscountingType
from ...enum_types.underlying_type import UnderlyingType
from ...instrument._definition import ObjectDefinition


class CapUnderlyingDefinition(ObjectDefinition):
    """The underlying instrument section of a cap volatility surface request."""

    def __init__(self, instrument_code=None, discounting_type=None):
        super().__init__()
        self.instrument_code = instrument_code
        self.discounting_type = discounting_type

    @property
    def discounting_type(self):
        """
        The discounting type of the IR vol model:
        OisDiscounting, or BorDiscounting (default)
        :return: enum DiscountingType
        """
        return self._get_enum_parameter(DiscountingType, "discountingType")

    @discounting_type.setter
    def discounting_type(self, value):
        self._set_enum_parameter(DiscountingType, "discountingType", value)

    @property
    def instrument_code(self):
        """
        The currency of the stripped cap surface, vol cube, or interest
        rate vol model
        :return: str
        """
        return self._get_parameter("instrumentCode")

    @instrument_code.setter
    def instrument_code(self, value):
        self._set_parameter("instrumentCode", value)


class Definition(BaseDefinition):
    """Definition of a cap volatility surface request."""

    def __init__(
        self,
        *,
        instrument_code=None,
        discounting_type=None,
        tag,
        layout,
        calculation_params,
    ):
        super().__init__(tag=tag, layout=layout, underlying_type=UnderlyingType.CAP)
        self.calculation_params = calculation_params
        self.underlying_definition = CapUnderlyingDefinition(
            instrument_code=instrument_code, discounting_type=discounting_type
        )

    @property
    def calculation_params(self):
        """
        The section that contains the properties that define how the
        volatility surface is generated
        :return: object CalculationParams
        """
        # Docstring fixed: the getter returns CalculationParams, not the
        # EtiSurfaceParameters copy-pasted from the ETI module.
        return self._get_object_parameter(CalculationParams, "surfaceParameters")

    @calculation_params.setter
    def calculation_params(self, value):
        self._set_object_parameter(CalculationParams, "surfaceParameters", value)

    @property
    def underlying_definition(self):
        """
        The section that contains the properties that define the underlying
        instrument
        :return: object CapUnderlyingDefinition
        """
        return self._get_object_parameter(
            CapUnderlyingDefinition, "underlyingDefinition"
        )

    @underlying_definition.setter
    def underlying_definition(self, value):
        self._set_object_parameter(
            CapUnderlyingDefinition, "underlyingDefinition", value
        )
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/surface/cap/_cap_definition.py
0.878171
0.31012
_cap_definition.py
pypi
__all__ = ["CalculationParams"]

from ...enum_types.input_volatility_type import InputVolatilityType
from ...enum_types.volatility_adjustment_type import VolatilityAdjustmentType
from ...enum_types.axis import Axis
from ...instrument._definition import ObjectDefinition


class CalculationParams(ObjectDefinition):
    """Parameters controlling how a cap volatility surface is generated."""

    def __init__(
        self,
        input_volatility_type=None,
        volatility_adjustment_type=None,
        x_axis=None,
        y_axis=None,
        z_axis=None,
        market_data_date=None,
        shift_percent=None,
        source=None,
        valuation_date=None,
    ):
        super().__init__()
        self.input_volatility_type = input_volatility_type
        self.volatility_adjustment_type = volatility_adjustment_type
        self.x_axis = x_axis
        self.y_axis = y_axis
        self.z_axis = z_axis
        self.market_data_date = market_data_date
        self.shift_percent = shift_percent
        self.source = source
        self.valuation_date = valuation_date

    @property
    def input_volatility_type(self):
        """
        :return: enum InputVolatilityType
        """
        return self._get_enum_parameter(InputVolatilityType, "inputVolatilityType")

    @input_volatility_type.setter
    def input_volatility_type(self, value):
        self._set_enum_parameter(InputVolatilityType, "inputVolatilityType", value)

    @property
    def volatility_adjustment_type(self):
        """
        The volatility adjustment method used for stripping: ConstantCaplet,
        ConstantCap, ShiftedCap, NormalizedCap, NormalizedCaplet
        :return: enum VolatilityAdjustmentType
        """
        return self._get_enum_parameter(
            VolatilityAdjustmentType, "volatilityAdjustmentType"
        )

    @volatility_adjustment_type.setter
    def volatility_adjustment_type(self, value):
        self._set_enum_parameter(
            VolatilityAdjustmentType, "volatilityAdjustmentType", value
        )

    @property
    def x_axis(self):
        """
        The unit for the x axis (e.g. Date, Tenor)
        :return: enum Axis
        """
        return self._get_enum_parameter(Axis, "xAxis")

    @x_axis.setter
    def x_axis(self, value):
        self._set_enum_parameter(Axis, "xAxis", value)

    @property
    def y_axis(self):
        """
        The unit for the y axis (e.g. Strike, Delta); the supported units
        may depend on the asset class.
        :return: enum Axis
        """
        return self._get_enum_parameter(Axis, "yAxis")

    @y_axis.setter
    def y_axis(self, value):
        self._set_enum_parameter(Axis, "yAxis", value)

    @property
    def z_axis(self):
        """
        The unit for the z axis (e.g. Strike, Tenor, Expiries); applies to
        the Ir SABR Volatility Cube.
        :return: enum Axis
        """
        return self._get_enum_parameter(Axis, "zAxis")

    @z_axis.setter
    def z_axis(self, value):
        self._set_enum_parameter(Axis, "zAxis", value)

    @property
    def market_data_date(self):
        """
        :return: str
        """
        return self._get_parameter("marketDataDate")

    @market_data_date.setter
    def market_data_date(self, value):
        self._set_parameter("marketDataDate", value)

    @property
    def shift_percent(self):
        """
        The shift value used in calibration (Strike/Forward). Default: 0.0
        :return: float
        """
        return self._get_parameter("shiftPercent")

    @shift_percent.setter
    def shift_percent(self, value):
        self._set_parameter("shiftPercent", value)

    @property
    def source(self):
        """
        The requested volatility data contributor.
        :return: str
        """
        return self._get_parameter("source")

    @source.setter
    def source(self, value):
        self._set_parameter("source", value)

    @property
    def valuation_date(self):
        """
        :return: str
        """
        return self._get_parameter("valuationDate")

    @valuation_date.setter
    def valuation_date(self, value):
        self._set_parameter("valuationDate", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/surface/cap/_cap_calculation_params.py
0.905875
0.380327
_cap_calculation_params.py
pypi
__all__ = ["Definition"]

from .._base_definition import BaseDefinition
from ._swaption_calculation_params import CalculationParams
from ...enum_types.discounting_type import DiscountingType
from ...enum_types.underlying_type import UnderlyingType
from ...instrument._definition import ObjectDefinition


class SwaptionUnderlyingDefinition(ObjectDefinition):
    """The underlying instrument section of a swaption volatility surface request."""

    def __init__(self, instrument_code=None, discounting_type=None):
        super().__init__()
        self.instrument_code = instrument_code
        self.discounting_type = discounting_type

    @property
    def discounting_type(self):
        """
        The discounting type of the IR vol model:
        OisDiscounting, or BorDiscounting (default)
        :return: enum DiscountingType
        """
        return self._get_enum_parameter(DiscountingType, "discountingType")

    @discounting_type.setter
    def discounting_type(self, value):
        self._set_enum_parameter(DiscountingType, "discountingType", value)

    @property
    def instrument_code(self):
        """
        The currency of the stripped cap surface, vol cube, or interest
        rate vol model
        :return: str
        """
        return self._get_parameter("instrumentCode")

    @instrument_code.setter
    def instrument_code(self, value):
        self._set_parameter("instrumentCode", value)


class Definition(BaseDefinition):
    """Definition of a swaption volatility surface request."""

    def __init__(
        self,
        *,
        instrument_code=None,
        discounting_type=None,
        tag,
        layout,
        calculation_params,
    ):
        super().__init__(
            tag=tag, layout=layout, underlying_type=UnderlyingType.SWAPTION
        )
        self.calculation_params = calculation_params
        self.underlying_definition = SwaptionUnderlyingDefinition(
            instrument_code=instrument_code, discounting_type=discounting_type
        )

    @property
    def calculation_params(self):
        """
        The section that contains the properties that define how the
        volatility surface is generated
        :return: object CalculationParams
        """
        # Docstring fixed: the getter returns CalculationParams, not the
        # EtiSurfaceParameters copy-pasted from the ETI module.
        return self._get_object_parameter(CalculationParams, "surfaceParameters")

    @calculation_params.setter
    def calculation_params(self, value):
        self._set_object_parameter(CalculationParams, "surfaceParameters", value)

    @property
    def underlying_definition(self):
        """
        The section that contains the properties that define the underlying
        instrument
        :return: object SwaptionUnderlyingDefinition
        """
        return self._get_object_parameter(
            SwaptionUnderlyingDefinition, "underlyingDefinition"
        )

    @underlying_definition.setter
    def underlying_definition(self, value):
        self._set_object_parameter(
            SwaptionUnderlyingDefinition, "underlyingDefinition", value
        )
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/surface/swaption/_swaption_definition.py
0.867191
0.295446
_swaption_definition.py
pypi
__all__ = ["CalculationParams"]

from ...enum_types.axis import Axis
from ...enum_types.input_volatility_type import InputVolatilityType
from ...enum_types.volatility_adjustment_type import VolatilityAdjustmentType
from ...instrument._definition import ObjectDefinition


class CalculationParams(ObjectDefinition):
    """Parameters controlling how a swaption volatility surface is generated."""

    def __init__(
        self,
        input_volatility_type=None,
        volatility_adjustment_type=None,
        x_axis=None,
        y_axis=None,
        z_axis=None,
        market_data_date=None,
        shift_percent=None,
        source=None,
        valuation_date=None,
    ):
        super().__init__()
        self.input_volatility_type = input_volatility_type
        self.volatility_adjustment_type = volatility_adjustment_type
        self.x_axis = x_axis
        self.y_axis = y_axis
        self.z_axis = z_axis
        self.market_data_date = market_data_date
        self.shift_percent = shift_percent
        self.source = source
        self.valuation_date = valuation_date

    @property
    def market_data_date(self):
        """
        :return: str
        """
        return self._get_parameter("marketDataDate")

    @market_data_date.setter
    def market_data_date(self, value):
        self._set_parameter("marketDataDate", value)

    @property
    def valuation_date(self):
        """
        :return: str
        """
        return self._get_parameter("valuationDate")

    @valuation_date.setter
    def valuation_date(self, value):
        self._set_parameter("valuationDate", value)

    @property
    def source(self):
        """
        The requested volatility data contributor.
        :return: str
        """
        return self._get_parameter("source")

    @source.setter
    def source(self, value):
        self._set_parameter("source", value)

    @property
    def shift_percent(self):
        """
        The shift value used in calibration (Strike/Forward). Default: 0.0
        :return: float
        """
        return self._get_parameter("shiftPercent")

    @shift_percent.setter
    def shift_percent(self, value):
        self._set_parameter("shiftPercent", value)

    @property
    def input_volatility_type(self):
        """
        :return: enum InputVolatilityType
        """
        return self._get_enum_parameter(InputVolatilityType, "inputVolatilityType")

    @input_volatility_type.setter
    def input_volatility_type(self, value):
        self._set_enum_parameter(InputVolatilityType, "inputVolatilityType", value)

    @property
    def volatility_adjustment_type(self):
        """
        The volatility adjustment method used for stripping: ConstantCaplet,
        ConstantCap, ShiftedCap, NormalizedCap, NormalizedCaplet
        :return: enum VolatilityAdjustmentType
        """
        return self._get_enum_parameter(
            VolatilityAdjustmentType, "volatilityAdjustmentType"
        )

    @volatility_adjustment_type.setter
    def volatility_adjustment_type(self, value):
        self._set_enum_parameter(
            VolatilityAdjustmentType, "volatilityAdjustmentType", value
        )

    @property
    def x_axis(self):
        """
        The unit for the x axis (e.g. Date, Tenor)
        :return: enum Axis
        """
        return self._get_enum_parameter(Axis, "xAxis")

    @x_axis.setter
    def x_axis(self, value):
        self._set_enum_parameter(Axis, "xAxis", value)

    @property
    def y_axis(self):
        """
        The unit for the y axis (e.g. Strike, Delta); the supported units
        may depend on the asset class.
        :return: enum Axis
        """
        return self._get_enum_parameter(Axis, "yAxis")

    @y_axis.setter
    def y_axis(self, value):
        self._set_enum_parameter(Axis, "yAxis", value)

    @property
    def z_axis(self):
        """
        The unit for the z axis (e.g. Strike, Tenor, Expiries); applies to
        the Ir SABR Volatility Cube.
        :return: enum Axis
        """
        return self._get_enum_parameter(Axis, "zAxis")

    @z_axis.setter
    def z_axis(self, value):
        self._set_enum_parameter(Axis, "zAxis", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/surface/swaption/_swaption_calculation_params.py
0.905875
0.380327
_swaption_calculation_params.py
pypi
__all__ = ["Definition"]

from ._fx_calculation_params import CalculationParams
from .._base_definition import BaseDefinition
from ...enum_types.underlying_type import UnderlyingType
from ...instrument._definition import ObjectDefinition


class FxUnderlyingDefinition(ObjectDefinition):
    """The underlying FX cross section of a volatility surface request."""

    def __init__(self, fx_cross_code):
        super().__init__()
        self.fx_cross_code = fx_cross_code

    @property
    def fx_cross_code(self):
        """
        The ISO code of the cross currency (e.g. 'EURCHF'). Mandatory.
        :return: str
        """
        return self._get_parameter("fxCrossCode")

    @fx_cross_code.setter
    def fx_cross_code(self, value):
        self._set_parameter("fxCrossCode", value)


class Definition(BaseDefinition):
    """Definition of an FX volatility surface request."""

    def __init__(self, fx_cross_code, tag, layout, calculation_params):
        super().__init__(tag=tag, layout=layout, underlying_type=UnderlyingType.FX)
        self.calculation_params = calculation_params
        self.underlying_definition = FxUnderlyingDefinition(fx_cross_code=fx_cross_code)

    @property
    def calculation_params(self):
        """
        The section that contains the properties that define how the
        volatility surface is generated
        :return: object CalculationParams
        """
        return self._get_object_parameter(CalculationParams, "surfaceParameters")

    @calculation_params.setter
    def calculation_params(self, value):
        self._set_object_parameter(CalculationParams, "surfaceParameters", value)

    @property
    def underlying_definition(self):
        """
        The section that contains the properties that define the underlying
        instrument
        :return: object FxUnderlyingDefinition
        """
        return self._get_object_parameter(
            FxUnderlyingDefinition, "underlyingDefinition"
        )

    @underlying_definition.setter
    def underlying_definition(self, value):
        self._set_object_parameter(
            FxUnderlyingDefinition, "underlyingDefinition", value
        )
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/surface/fx/_fx_definition.py
0.88495
0.257497
_fx_definition.py
pypi
__all__ = ["CalculationParams"]

from ...instrument._definition import ObjectDefinition
from ...enum_types.price_side import PriceSide
from ...enum_types.fx_swap_calculation_method import FxSwapCalculationMethod
from ...enum_types.fx_volatility_model import FxVolatilityModel
from ...enum_types.axis import Axis
from ...enum_types.time_stamp import TimeStamp
from ...models import BidAskMid
from ...models import InterpolationWeight


class CalculationParams(ObjectDefinition):
    """Parameters controlling how an FX volatility surface is generated."""

    def __init__(
        self,
        *,
        atm_volatility_object=None,
        butterfly10_d_object=None,
        butterfly25_d_object=None,
        domestic_deposit_rate_percent_object=None,
        foreign_deposit_rate_percent_object=None,
        forward_points_object=None,
        fx_spot_object=None,
        fx_swap_calculation_method=None,
        implied_volatility_object=None,
        interpolation_weight=None,
        price_side=None,
        risk_reversal10_d_object=None,
        risk_reversal25_d_object=None,
        time_stamp=None,
        volatility_model=None,
        x_axis,
        y_axis,
        calculation_date=None,
    ):
        super().__init__()
        self.atm_volatility_object = atm_volatility_object
        self.butterfly10_d_object = butterfly10_d_object
        self.butterfly25_d_object = butterfly25_d_object
        self.domestic_deposit_rate_percent_object = domestic_deposit_rate_percent_object
        self.foreign_deposit_rate_percent_object = foreign_deposit_rate_percent_object
        self.forward_points_object = forward_points_object
        self.fx_spot_object = fx_spot_object
        self.fx_swap_calculation_method = fx_swap_calculation_method
        self.implied_volatility_object = implied_volatility_object
        self.interpolation_weight = interpolation_weight
        self.price_side = price_side
        self.risk_reversal10_d_object = risk_reversal10_d_object
        self.risk_reversal25_d_object = risk_reversal25_d_object
        self.time_stamp = time_stamp
        self.volatility_model = volatility_model
        self.x_axis = x_axis
        self.y_axis = y_axis
        self.calculation_date = calculation_date

    @property
    def atm_volatility_object(self):
        """
        At the money volatility at Expiry
        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "atmVolatilityObject")

    @atm_volatility_object.setter
    def atm_volatility_object(self, value):
        self._set_object_parameter(BidAskMid, "atmVolatilityObject", value)

    @property
    def butterfly10_d_object(self):
        """
        BF 10 Days at Expiry
        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "butterfly10DObject")

    @butterfly10_d_object.setter
    def butterfly10_d_object(self, value):
        self._set_object_parameter(BidAskMid, "butterfly10DObject", value)

    @property
    def butterfly25_d_object(self):
        """
        BF 25 Days at Expiry
        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "butterfly25DObject")

    @butterfly25_d_object.setter
    def butterfly25_d_object(self, value):
        self._set_object_parameter(BidAskMid, "butterfly25DObject", value)

    @property
    def domestic_deposit_rate_percent_object(self):
        """
        Domestic Deposit Rate at Expiry
        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "domesticDepositRatePercentObject")

    @domestic_deposit_rate_percent_object.setter
    def domestic_deposit_rate_percent_object(self, value):
        self._set_object_parameter(BidAskMid, "domesticDepositRatePercentObject", value)

    @property
    def foreign_deposit_rate_percent_object(self):
        """
        Foreign Deposit Rate at Expiry
        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "foreignDepositRatePercentObject")

    @foreign_deposit_rate_percent_object.setter
    def foreign_deposit_rate_percent_object(self, value):
        self._set_object_parameter(BidAskMid, "foreignDepositRatePercentObject", value)

    @property
    def forward_points_object(self):
        """
        Forward Points at Expiry
        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "forwardPointsObject")

    @forward_points_object.setter
    def forward_points_object(self, value):
        self._set_object_parameter(BidAskMid, "forwardPointsObject", value)

    @property
    def fx_spot_object(self):
        """
        Spot Price
        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "fxSpotObject")

    @fx_spot_object.setter
    def fx_spot_object(self, value):
        self._set_object_parameter(BidAskMid, "fxSpotObject", value)

    @property
    def fx_swap_calculation_method(self):
        """
        The method we chose to price outrights using or not implied deposits.
        Possible values are:
        FxSwap (compute outrights using swap points),
        DepositCcy1ImpliedFromFxSwap (compute currency1 deposits using swap
        points),
        DepositCcy2ImpliedFromFxSwap (compute currency2 deposits using swap
        points).
        Optional. Defaults to 'FxSwap'.
        :return: enum FxSwapCalculationMethod
        """
        return self._get_enum_parameter(
            FxSwapCalculationMethod, "fxSwapCalculationMethod"
        )

    @fx_swap_calculation_method.setter
    def fx_swap_calculation_method(self, value):
        self._set_enum_parameter(
            FxSwapCalculationMethod, "fxSwapCalculationMethod", value
        )

    @property
    def implied_volatility_object(self):
        """
        Implied Volatility at Expiry
        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "impliedVolatilityObject")

    @implied_volatility_object.setter
    def implied_volatility_object(self, value):
        self._set_object_parameter(BidAskMid, "impliedVolatilityObject", value)

    @property
    def interpolation_weight(self):
        """
        Vol Term Structure Interpolation
        :return: object InterpolationWeight
        """
        return self._get_object_parameter(InterpolationWeight, "interpolationWeight")

    @interpolation_weight.setter
    def interpolation_weight(self, value):
        self._set_object_parameter(InterpolationWeight, "interpolationWeight", value)

    @property
    def price_side(self):
        """
        Specifies whether bid, ask or mid is used to build the surface.
        :return: enum PriceSide
        """
        # Docstring fixed: the getter returns PriceSide, not "FxPriceSide".
        return self._get_enum_parameter(PriceSide, "priceSide")

    @price_side.setter
    def price_side(self, value):
        self._set_enum_parameter(PriceSide, "priceSide", value)

    @property
    def risk_reversal10_d_object(self):
        """
        RR 10 Days at Expiry
        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "riskReversal10DObject")

    @risk_reversal10_d_object.setter
    def risk_reversal10_d_object(self, value):
        self._set_object_parameter(BidAskMid, "riskReversal10DObject", value)

    @property
    def risk_reversal25_d_object(self):
        """
        RR 25 Days at Expiry
        :return: object BidAskMid
        """
        return self._get_object_parameter(BidAskMid, "riskReversal25DObject")

    @risk_reversal25_d_object.setter
    def risk_reversal25_d_object(self, value):
        self._set_object_parameter(BidAskMid, "riskReversal25DObject", value)

    @property
    def time_stamp(self):
        """
        Define how the timestamp is selected:
        - Open: the opening value of the valuationDate or if not available
          the close of the previous day is used.
        - Default: the latest snapshot is used when valuationDate is today,
          the close price when valuationDate is in the past.
        :return: enum TimeStamp
        """
        return self._get_enum_parameter(TimeStamp, "timeStamp")

    @time_stamp.setter
    def time_stamp(self, value):
        self._set_enum_parameter(TimeStamp, "timeStamp", value)

    @property
    def volatility_model(self):
        """
        The quantitative model used to generate the volatility surface.
        This may depend on the asset class. For Fx Volatility Surface, we
        currently support the SVI model.
        :return: enum FxVolatilityModel
        """
        return self._get_enum_parameter(FxVolatilityModel, "volatilityModel")

    @volatility_model.setter
    def volatility_model(self, value):
        self._set_enum_parameter(FxVolatilityModel, "volatilityModel", value)

    @property
    def x_axis(self):
        """
        Specifies the unit for the x axis (e.g. Date, Tenor)
        :return: enum Axis
        """
        return self._get_enum_parameter(Axis, "xAxis")

    @x_axis.setter
    def x_axis(self, value):
        self._set_enum_parameter(Axis, "xAxis", value)

    @property
    def y_axis(self):
        """
        Specifies the unit for the y axis (e.g. Strike, Delta). This may
        depend on the asset class. For Fx Volatility Surface, we support
        both Delta and Strike.
        :return: enum Axis
        """
        return self._get_enum_parameter(Axis, "yAxis")

    @y_axis.setter
    def y_axis(self, value):
        self._set_enum_parameter(Axis, "yAxis", value)

    @property
    def calculation_date(self):
        """
        The date the volatility surface is generated.
        :return: str
        """
        return self._get_parameter("calculationDate")

    @calculation_date.setter
    def calculation_date(self, value):
        self._set_parameter("calculationDate", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/surface/fx/_fx_calculation_params.py
0.841988
0.172869
_fx_calculation_params.py
pypi
__all__ = ["CalculationParams"]

from ...enum_types.axis import Axis
from ...enum_types.eti_input_volatility_type import EtiInputVolatilityType
from ...enum_types.price_side import PriceSide
from ...enum_types.time_stamp import TimeStamp
from ...enum_types.volatility_model import VolatilityModel
from ...instrument._definition import ObjectDefinition


class CalculationParams(ObjectDefinition):
    """Parameters controlling how an ETI volatility surface is generated."""

    def __init__(
        self,
        input_volatility_type=None,
        price_side=None,
        time_stamp=None,
        volatility_model=None,
        x_axis=None,
        y_axis=None,
        calculation_date=None,
    ):
        super().__init__()
        self.input_volatility_type = input_volatility_type
        self.price_side = price_side
        self.time_stamp = time_stamp
        self.volatility_model = volatility_model
        self.x_axis = x_axis
        self.y_axis = y_axis
        self.calculation_date = calculation_date

    @property
    def input_volatility_type(self):
        """
        Specifies the type of volatility used as an input of the model
        (calculated Implied Volatility, Settlement)
        - Settle: [DEPRECATED] The service uses the settlement volatility
          to build the volatility surface
        - Quoted: The service uses the quoted volatility to build the
          volatility surface
        - Implied: The service internally calculates implied volatilities
          for the option universe before building the surface
        :return: enum EtiInputVolatilityType
        """
        return self._get_enum_parameter(EtiInputVolatilityType, "inputVolatilityType")

    @input_volatility_type.setter
    def input_volatility_type(self, value):
        self._set_enum_parameter(EtiInputVolatilityType, "inputVolatilityType", value)

    @property
    def price_side(self):
        """
        Specifies whether bid, ask or mid is used to build the surface.
        :return: enum PriceSide
        """
        return self._get_enum_parameter(PriceSide, "priceSide")

    @price_side.setter
    def price_side(self, value):
        self._set_enum_parameter(PriceSide, "priceSide", value)

    @property
    def time_stamp(self):
        """
        Define how the timestamp is selected:
        - Open: the opening value of the valuationDate or if not available
          the close of the previous day is used.
        - Default: the latest snapshot is used when valuationDate is today,
          the close price when valuationDate is in the past.
        :return: enum TimeStamp
        """
        return self._get_enum_parameter(TimeStamp, "timeStamp")

    @time_stamp.setter
    def time_stamp(self, value):
        self._set_enum_parameter(TimeStamp, "timeStamp", value)

    @property
    def volatility_model(self):
        """
        The quantitative model used to generate the volatility surface.
        The supported models may depend on the asset class.
        :return: enum VolatilityModel
        """
        # Docstring fixed: the original carried an FX-specific SVI note
        # copy-pasted from the FX module into this ETI module.
        return self._get_enum_parameter(VolatilityModel, "volatilityModel")

    @volatility_model.setter
    def volatility_model(self, value):
        self._set_enum_parameter(VolatilityModel, "volatilityModel", value)

    @property
    def x_axis(self):
        """
        Specifies the unit for the x axis (e.g. Date, Tenor)
        :return: enum Axis
        """
        return self._get_enum_parameter(Axis, "xAxis")

    @x_axis.setter
    def x_axis(self, value):
        self._set_enum_parameter(Axis, "xAxis", value)

    @property
    def y_axis(self):
        """
        Specifies the unit for the y axis (e.g. Strike, Delta). This may
        depend on the asset class.
        :return: enum Axis
        """
        return self._get_enum_parameter(Axis, "yAxis")

    @y_axis.setter
    def y_axis(self, value):
        self._set_enum_parameter(Axis, "yAxis", value)

    @property
    def calculation_date(self):
        """
        The date the volatility surface is generated.
        :return: str
        """
        return self._get_parameter("calculationDate")

    @calculation_date.setter
    def calculation_date(self, value):
        self._set_parameter("calculationDate", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/surface/eti/_eti_calculation_params.py
0.917103
0.415729
_eti_calculation_params.py
pypi
__all__ = ["Definition"]

from .._base_definition import BaseDefinition
from ._eti_calculation_params import CalculationParams
from ...enum_types.underlying_type import UnderlyingType
from ...instrument._definition import ObjectDefinition


class EtiUnderlyingDefinition(ObjectDefinition):
    """The underlying instrument section of an ETI volatility surface request."""

    def __init__(
        self,
        instrument_code=None,
        clean_instrument_code=None,
        exchange=None,
        is_future_underlying=None,
    ):
        super().__init__()
        self.instrument_code = instrument_code
        self.clean_instrument_code = clean_instrument_code
        self.exchange = exchange
        self.is_future_underlying = is_future_underlying

    @property
    def clean_instrument_code(self):
        """
        :return: str
        """
        return self._get_parameter("cleanInstrumentCode")

    @clean_instrument_code.setter
    def clean_instrument_code(self, value):
        self._set_parameter("cleanInstrumentCode", value)

    @property
    def exchange(self):
        """
        Specifies the exchange to be used to retrieve the underlying data.
        :return: str
        """
        return self._get_parameter("exchange")

    @exchange.setter
    def exchange(self, value):
        self._set_parameter("exchange", value)

    @property
    def instrument_code(self):
        """
        The code (RIC for equities and indices and RICROOT for Futures.)
        that represents the instrument.
        The format for equities and indices is xxx@RIC (Example: VOD.L@RIC)
        The format for Futures is xx@RICROOT (Example: CL@RICROOT)
        :return: str
        """
        return self._get_parameter("instrumentCode")

    @instrument_code.setter
    def instrument_code(self, value):
        self._set_parameter("instrumentCode", value)

    @property
    def is_future_underlying(self):
        """
        :return: bool
        """
        return self._get_parameter("isFutureUnderlying")

    @is_future_underlying.setter
    def is_future_underlying(self, value):
        self._set_parameter("isFutureUnderlying", value)


class Definition(BaseDefinition):
    """Definition of an ETI volatility surface request."""

    def __init__(
        self,
        *,
        instrument_code=None,
        clean_instrument_code=None,
        exchange=None,
        is_future_underlying=None,
        tag,
        layout,
        calculation_params,
    ):
        super().__init__(tag=tag, layout=layout, underlying_type=UnderlyingType.ETI)
        self.calculation_params = calculation_params
        self.underlying_definition = EtiUnderlyingDefinition(
            instrument_code=instrument_code,
            clean_instrument_code=clean_instrument_code,
            exchange=exchange,
            is_future_underlying=is_future_underlying,
        )

    @property
    def calculation_params(self):
        """
        The section that contains the properties that define how the
        volatility surface is generated
        :return: object CalculationParams
        """
        # Docstring fixed: the getter returns CalculationParams, not the
        # server-side "EtiSurfaceParameters" name.
        return self._get_object_parameter(CalculationParams, "surfaceParameters")

    @calculation_params.setter
    def calculation_params(self, value):
        self._set_object_parameter(CalculationParams, "surfaceParameters", value)

    @property
    def underlying_definition(self):
        """
        The section that contains the properties that define the underlying
        instrument
        :return: object EtiUnderlyingDefinition
        """
        return self._get_object_parameter(
            EtiUnderlyingDefinition, "underlyingDefinition"
        )

    @underlying_definition.setter
    def underlying_definition(self, value):
        self._set_object_parameter(
            EtiUnderlyingDefinition, "underlyingDefinition", value
        )
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/surface/eti/_eti_definition.py
0.869604
0.250718
_eti_definition.py
pypi
__all__ = ["ProtectionLegDefinition"] from ...instrument._definition import ObjectDefinition from ...enum_types.seniority import Seniority from ...enum_types.doc_clause import DocClause from ...enum_types.direction import Direction class ProtectionLegDefinition(ObjectDefinition): def __init__( self, *, direction=None, doc_clause=None, index_factor, index_series, notional_amount, notional_ccy=None, recovery_rate, reference_entity=None, seniority, settlement_convention, recovery_rate_percent=None, ): super().__init__() self.direction = direction self.notional_ccy = notional_ccy self.notional_amount = notional_amount self.doc_clause = doc_clause self.seniority = seniority self.index_factor = index_factor self.index_series = index_series self.recovery_rate = recovery_rate self.recovery_rate_percent = recovery_rate_percent self.reference_entity = reference_entity self.settlement_convention = settlement_convention @property def direction(self): """ The direction of the leg. the possible values are: 'Paid' (the cash flows of the leg are paid to the counterparty), 'Received' (the cash flows of the leg are received from the counterparty). Optional for a single leg instrument (like a bond), in that case default value is Received. It is mandatory for a multi-instrument leg instrument (like Swap or CDS leg). :return: enum Direction """ return self._get_enum_parameter(Direction, "direction") @direction.setter def direction(self, value): self._set_enum_parameter(Direction, "direction", value) @property def doc_clause(self): """ The restructuring clause or credit event for Single Name Cds. The possible values are: - CumRestruct14, - ModifiedRestruct14, - ModModRestruct14, - ExRestruct14, - CumRestruct03, - ModifiedRestruct03, - ModModRestruct03, - ExRestruct03. Optional. By default the docClause of the referenceEntity's Primary Ric is used. 
:return: enum DocClause """ return self._get_enum_parameter(DocClause, "docClause") @doc_clause.setter def doc_clause(self, value): self._set_enum_parameter(DocClause, "docClause", value) @property def seniority(self): """ The order of repayment in the case of a credit event for Single Name Cds. The possible values are: - Secured (Secured Debt (Corporate/Financial) or Domestic Currency Sovereign Debt (Government)), - SeniorUnsecured (Senior Unsecured Debt (Corporate/Financial) or Foreign Currency Sovereign Debt (Government)), - Subordinated (Subordinated or Lower Tier 2 Debt (Banks)), - JuniorSubordinated (Junior Subordinated or Upper Tier 2 Debt (Banks)), - Preference (Preference Shares or Tier 1 Capital (Banks)). Optional. By default the seniority of the referenceEntity's Primary Ric is used. :return: enum Seniority """ return self._get_enum_parameter(Seniority, "seniority") @seniority.setter def seniority(self, value): self._set_enum_parameter(Seniority, "seniority", value) @property def index_factor(self): """ The factor that is applied to the notional in case a credit event happens in one of the constituents of the Cds Index. Optional. By default no factor (1) applies. :return: float """ return self._get_parameter("indexFactor") @index_factor.setter def index_factor(self, value): self._set_parameter("indexFactor", value) @property def index_series(self): """ The serie of the Cds Index. Optional. By default the serie of the BenchmarkRic is used. :return: int """ return self._get_parameter("indexSeries") @index_series.setter def index_series(self, value): self._set_parameter("indexSeries", value) @property def notional_amount(self): """ The notional amount of the leg at the period start date. Optional. By default 1,000,000 is used. 
:return: float """ return self._get_parameter("notionalAmount") @notional_amount.setter def notional_amount(self, value): self._set_parameter("notionalAmount", value) @property def notional_ccy(self): """ The ISO code of the notional currency. Mandatory if instrument code or instrument style has not been defined. In case an instrument code/style has been defined, value may comes from the reference data. :return: str """ return self._get_parameter("notionalCcy") @notional_ccy.setter def notional_ccy(self, value): self._set_parameter("notionalCcy", value) @property def recovery_rate(self): """ The percentage of recovery in case of a credit event. Optional. By default the recoveryRate of the Cds built from referenceEntity, seniority, docClause and notionalCurrency is used. :return: float """ return self._get_parameter("recoveryRate") @recovery_rate.setter def recovery_rate(self, value): self._set_parameter("recoveryRate", value) @property def recovery_rate_percent(self): """ The percentage of recovery in case of a credit event. Optional. By default the recoveryRate of the Cds built from referenceEntity, seniority, docClause and notionalCurrency is used. :return: float """ return self._get_parameter("recoveryRatePercent") @recovery_rate_percent.setter def recovery_rate_percent(self, value): self._set_parameter("recoveryRatePercent", value) @property def reference_entity(self): """ The identifier of the reference entity, it can be: - for Single Name : a RedCode, an OrgId, a reference entity's RIC, - for Index : a RedCode, a ShortName, a CommonName. Mandatory. :return: str """ return self._get_parameter("referenceEntity") @reference_entity.setter def reference_entity(self, value): self._set_parameter("referenceEntity", value) @property def settlement_convention(self): """ The cashSettlementRule of the CDS. Optional. By default "3WD" (3 week days) is used. 
:return: str """ return self._get_parameter("settlementConvention") @settlement_convention.setter def settlement_convention(self, value): self._set_parameter("settlementConvention", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/contracts/cds/_protection_leg_definition.py
0.876845
0.323086
_protection_leg_definition.py
pypi
__all__ = ["PremiumLegDefinition"] from ...instrument._definition import ObjectDefinition from ...enum_types.stub_rule import StubRule from ...enum_types.direction import Direction from ...enum_types.frequency import Frequency from ...enum_types.day_count_basis import DayCountBasis from ...enum_types.business_day_convention import BusinessDayConvention class PremiumLegDefinition(ObjectDefinition): def __init__( self, *, direction, notional_ccy=None, notional_amount=None, fixed_rate_percent=None, interest_payment_ccy, interest_payment_frequency, interest_calculation_method, accrued_calculation_method=None, payment_business_day_convention=None, first_regular_payment_date=None, last_regular_payment_date=None, payment_business_days=None, stub_rule=None, accrued_paid_on_default=None, ): super().__init__() self.direction = direction self.notional_ccy = notional_ccy self.notional_amount = notional_amount self.fixed_rate_percent = fixed_rate_percent self.interest_payment_frequency = interest_payment_frequency self.interest_calculation_method = interest_calculation_method self.accrued_calculation_method = accrued_calculation_method self.payment_business_day_convention = payment_business_day_convention self.first_regular_payment_date = first_regular_payment_date self.last_regular_payment_date = last_regular_payment_date self.payment_business_days = payment_business_days self.stub_rule = stub_rule self.accrued_paid_on_default = accrued_paid_on_default self.interest_payment_ccy = interest_payment_ccy @property def accrued_calculation_method(self): """ The Day Count Basis method used to calculate the accrued interest payments. Optional. By default, the same value than InterestCalculationMethod is used. 
:return: enum DayCountBasis """ return self._get_enum_parameter(DayCountBasis, "accruedCalculationMethod") @accrued_calculation_method.setter def accrued_calculation_method(self, value): self._set_enum_parameter(DayCountBasis, "accruedCalculationMethod", value) @property def direction(self): """ The direction of the leg. the possible values are: 'Paid' (the cash flows of the leg are paid to the counterparty), 'Received' (the cash flows of the leg are received from the counterparty). Optional for a single leg instrument (like a bond), in that case default value is Received. It is mandatory for a multi-instrument leg instrument (like Swap or CDS leg). :return: enum Direction """ return self._get_enum_parameter(Direction, "direction") @direction.setter def direction(self, value): self._set_enum_parameter(Direction, "direction", value) @property def interest_calculation_method(self): """ The Day Count Basis method used to calculate the coupon interest payments. Mandatory. :return: enum DayCountBasis """ return self._get_enum_parameter(DayCountBasis, "interestCalculationMethod") @interest_calculation_method.setter def interest_calculation_method(self, value): self._set_enum_parameter(DayCountBasis, "interestCalculationMethod", value) @property def interest_payment_frequency(self): """ The frequency of the interest payments. Optional if an instrument code/style have been defined : in that case, value comes from reference data. Otherwise, it is mandatory. :return: enum Frequency """ return self._get_enum_parameter(Frequency, "interestPaymentFrequency") @interest_payment_frequency.setter def interest_payment_frequency(self, value): self._set_enum_parameter(Frequency, "interestPaymentFrequency", value) @property def payment_business_day_convention(self): """ The method to adjust dates to a working day. 
The possible values are: - ModifiedFollowing (adjusts dates according to the Modified Following convention - next business day unless is it goes into the next month, preceeding is used in that case), - NextBusinessDay (adjusts dates according to the Following convention - Next Business Day), - PreviousBusinessDay (adjusts dates according to the Preceeding convention - Previous Business Day), - NoMoving (does not adjust dates), - BbswModifiedFollowing (adjusts dates according to the BBSW Modified Following convention). Optional. In case an instrument code/style has been defined, value comes from bond reference data. Otherwise 'ModifiedFollowing' is used. :return: enum BusinessDayConvention """ return self._get_enum_parameter( BusinessDayConvention, "paymentBusinessDayConvention" ) @payment_business_day_convention.setter def payment_business_day_convention(self, value): self._set_enum_parameter( BusinessDayConvention, "paymentBusinessDayConvention", value ) @property def stub_rule(self): """ The rule that defines whether coupon roll dates are aligned on the maturity or the issue date. The possible values are: - ShortFirstProRata (to create a short period between the start date and the first coupon date, and pay a smaller amount of interest for the short period.All coupon dates are calculated backward from the maturity date), - ShortFirstFull (to create a short period between the start date and the first coupon date, and pay a regular coupon on the first coupon date. All coupon dates are calculated backward from the maturity date), - LongFirstFull (to create a long period between the start date and the second coupon date, and pay a regular coupon on the second coupon date. All coupon dates are calculated backward from the maturity date), - ShortLastProRata (to create a short period between the last payment date and maturity, and pay a smaller amount of interest for the short period. All coupon dates are calculated forward from the start date). 
This property may also be used in conjunction with firstRegularPaymentDate and lastRegularPaymentDate; in that case the following values can be defined: - Issue (all dates are aligned on the issue date), - Maturity (all dates are aligned on the maturity date). Optional. By default 'Maturity' is used. :return: enum StubRule """ return self._get_enum_parameter(StubRule, "stubRule") @stub_rule.setter def stub_rule(self, value): self._set_enum_parameter(StubRule, "stubRule", value) @property def accrued_paid_on_default(self): """ Specifies whether the accrued is paid at the credit event date or not. - true : the accrued is paid at the credit event date - false : the accrued is not paid at the credit event date Optional. Defaults to false. :return: bool """ return self._get_parameter("accruedPaidOnDefault") @accrued_paid_on_default.setter def accrued_paid_on_default(self, value): self._set_parameter("accruedPaidOnDefault", value) @property def first_regular_payment_date(self): """ The first regular coupon payment date for leg with an odd first coupon. Optional. :return: str """ return self._get_parameter("firstRegularPaymentDate") @first_regular_payment_date.setter def first_regular_payment_date(self, value): self._set_parameter("firstRegularPaymentDate", value) @property def fixed_rate_percent(self): """ The fixed coupon rate in percentage. It is mandatory in case of a single leg instrument. Otherwise, in case of multi leg instrument, it can be computed as the Par rate. :return: float """ return self._get_parameter("fixedRatePercent") @fixed_rate_percent.setter def fixed_rate_percent(self, value): self._set_parameter("fixedRatePercent", value) @property def interest_payment_ccy(self): """ The ISO code of the interest payment currency. Mandatory. 
:return: str """ return self._get_parameter("interestPaymentCcy") @interest_payment_ccy.setter def interest_payment_ccy(self, value): self._set_parameter("interestPaymentCcy", value) @property def last_regular_payment_date(self): """ The last regular coupon payment date for leg with an odd last coupon. Optional. :return: str """ return self._get_parameter("lastRegularPaymentDate") @last_regular_payment_date.setter def last_regular_payment_date(self, value): self._set_parameter("lastRegularPaymentDate", value) @property def notional_amount(self): """ The notional amount of the leg at the period start date. Optional. By default 1,000,000 is used. :return: float """ return self._get_parameter("notionalAmount") @notional_amount.setter def notional_amount(self, value): self._set_parameter("notionalAmount", value) @property def notional_ccy(self): """ The ISO code of the notional currency. Mandatory if instrument code or instrument style has not been defined. In case an instrument code/style has been defined, value may comes from the reference data. :return: str """ return self._get_parameter("notionalCcy") @notional_ccy.setter def notional_ccy(self, value): self._set_parameter("notionalCcy", value) @property def payment_business_days(self): """ A list of coma-separated calendar codes to adjust dates (e.g. 'EMU' or 'USA'). Optional. By default the calendar associated to NotionalCcy is used. :return: str """ return self._get_parameter("paymentBusinessDays") @payment_business_days.setter def payment_business_days(self, value): self._set_parameter("paymentBusinessDays", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/contracts/cds/_premium_leg_definition.py
0.888861
0.464234
_premium_leg_definition.py
pypi
__all__ = ["CalculationParams"] from ...instrument import InstrumentCalculationParams class CalculationParams(InstrumentCalculationParams): def __init__( self, *, valuation_date=None, market_data_date=None, report_ccy=None, upfront_amount_in_deal_ccy=None, upfront_percent=None, clean_price_percent=None, conventional_spread_bp=None, cash_amount_in_deal_ccy=None, ): super().__init__() self.valuation_date = valuation_date self.market_data_date = market_data_date self.report_ccy = report_ccy self.upfront_amount_in_deal_ccy = upfront_amount_in_deal_ccy self.upfront_percent = upfront_percent self.clean_price_percent = clean_price_percent self.conventional_spread_bp = conventional_spread_bp self.cash_amount_in_deal_ccy = cash_amount_in_deal_ccy @property def market_data_date(self): """ :return: str """ return self._get_parameter("marketDataDate") @market_data_date.setter def market_data_date(self, value): self._set_parameter("marketDataDate", value) @property def report_ccy(self): """ :return: str """ return self._get_parameter("reportCcy") @report_ccy.setter def report_ccy(self, value): self._set_parameter("reportCcy", value) @property def cash_amount_in_deal_ccy(self): """ CashAmountInDealCcy to override and that will be used as pricing analysis input to compute the cds other outputs. Optional. No override is applied by default. Note that only one pricing analysis input should be defined. :return: float """ return self._get_parameter("cashAmountInDealCcy") @cash_amount_in_deal_ccy.setter def cash_amount_in_deal_ccy(self, value): self._set_parameter("cashAmountInDealCcy", value) @property def clean_price_percent(self): """ CleanPricePercent to override and that will be used as pricing analysis input to compute the cds other outputs. Optional. No override is applied by default. Note that only one pricing analysis input should be defined. 
:return: float """ return self._get_parameter("cleanPricePercent") @clean_price_percent.setter def clean_price_percent(self, value): self._set_parameter("cleanPricePercent", value) @property def conventional_spread_bp(self): """ ConventionalSpreadBp to override and that will be used as pricing analysis input to compute the cds other outputs. Optional. No override is applied by default. Note that only one pricing analysis input should be defined. :return: float """ return self._get_parameter("conventionalSpreadBp") @conventional_spread_bp.setter def conventional_spread_bp(self, value): self._set_parameter("conventionalSpreadBp", value) @property def upfront_amount_in_deal_ccy(self): """ UpfrontAmountInDealCcy to override and that will be used as pricing analysis input to compute the cds other outputs. Optional. No override is applied by default. Note that only one pricing analysis input should be defined. :return: float """ return self._get_parameter("upfrontAmountInDealCcy") @upfront_amount_in_deal_ccy.setter def upfront_amount_in_deal_ccy(self, value): self._set_parameter("upfrontAmountInDealCcy", value) @property def upfront_percent(self): """ UpfrontPercent to override and that will be used as pricing analysis input to compute the cds other outputs. Optional. No override is applied by default. Note that only one pricing analysis input should be defined. :return: float """ return self._get_parameter("upfrontPercent") @upfront_percent.setter def upfront_percent(self, value): self._set_parameter("upfrontPercent", value) @property def valuation_date(self): """ The valuation date for pricing. Optional. If not set the valuation date is equal to MarketDataDate or Today. For assets that contains a settlementConvention, the default valuation date is equal to the settlementdate of the Asset that is usually the TradeDate+SettlementConvention. 
:return: str """ return self._get_parameter("valuationDate") @valuation_date.setter def valuation_date(self, value): self._set_parameter("valuationDate", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/contracts/cds/_cds_pricing_parameters.py
0.91879
0.212886
_cds_pricing_parameters.py
pypi
__all__ = ["Definition"] from ...instrument.instrument_definition import InstrumentDefinition from ...enum_types.stub_rule import StubRule from ...enum_types.buy_sell import BuySell from ...enum_types.adjust_interest_to_payment_date import AdjustInterestToPaymentDate from ...enum_types.day_count_basis import DayCountBasis from ...enum_types.business_day_convention import BusinessDayConvention from ...enum_types.date_rolling_convention import DateRollingConvention from ...enum_types.frequency import Frequency from ...enum_types.index_reset_type import IndexResetType from ...models import AmortizationItem class Definition(InstrumentDefinition): def __init__( self, *, instrument_tag=None, start_date=None, end_date=None, tenor=None, notional_ccy, notional_amount=None, index_name=None, index_tenor=None, interest_payment_frequency=None, interest_calculation_method=None, payment_business_day_convention=None, payment_roll_convention=None, index_reset_frequency=None, index_reset_type=None, index_fixing_lag=None, amortization_schedule=None, adjust_interest_to_payment_date=None, buy_sell, cap_strike_percent, floor_strike_percent=None, index_fixing_ric=None, stub_rule=None, ): super().__init__() self.instrument_tag = instrument_tag self.start_date = start_date self.end_date = end_date self.tenor = tenor self.notional_ccy = notional_ccy self.notional_amount = notional_amount self.index_name = index_name self.index_tenor = index_tenor self.interest_payment_frequency = interest_payment_frequency self.interest_calculation_method = interest_calculation_method self.payment_business_day_convention = payment_business_day_convention self.payment_roll_convention = payment_roll_convention self.index_reset_frequency = index_reset_frequency self.index_reset_type = index_reset_type self.index_fixing_lag = index_fixing_lag self.amortization_schedule = amortization_schedule self.adjust_interest_to_payment_date = adjust_interest_to_payment_date self.buy_sell = buy_sell self.cap_strike_percent 
= cap_strike_percent self.floor_strike_percent = floor_strike_percent self.index_fixing_ric = index_fixing_ric self.stub_rule = stub_rule @classmethod def get_instrument_type(cls): return "CapFloor" @property def adjust_interest_to_payment_date(self): """ A flag that indicates if the coupon dates are adjusted to the payment dates. Optional. By default 'false' is used. :return: enum AdjustInterestToPaymentDate """ return self._get_enum_parameter( AdjustInterestToPaymentDate, "adjustInterestToPaymentDate" ) @adjust_interest_to_payment_date.setter def adjust_interest_to_payment_date(self, value): self._set_enum_parameter( AdjustInterestToPaymentDate, "adjustInterestToPaymentDate", value ) @property def amortization_schedule(self): """ Definition of amortizations :return: list AmortizationItem """ return self._get_list_parameter(AmortizationItem, "amortizationSchedule") @amortization_schedule.setter def amortization_schedule(self, value): self._set_list_parameter(AmortizationItem, "amortizationSchedule", value) @property def buy_sell(self): """ The side of the deal. Possible values: - Buy - Sell :return: enum BuySell """ return self._get_enum_parameter(BuySell, "buySell") @buy_sell.setter def buy_sell(self, value): self._set_enum_parameter(BuySell, "buySell", value) @property def index_reset_frequency(self): """ The reset frequency in case the leg Type is Float. Optional. By default the IndexTenor is used. :return: enum Frequency """ return self._get_enum_parameter(Frequency, "indexResetFrequency") @index_reset_frequency.setter def index_reset_frequency(self, value): self._set_enum_parameter(Frequency, "indexResetFrequency", value) @property def index_reset_type(self): """ A flag that indicates if the floating rate index is reset before the coupon period starts or at the end of the coupon period. The possible values are: - InAdvance (resets the index before the start of the interest period), - InArrears (resets the index at the end of the interest period). Optional. 
By default 'InAdvance' is used. :return: enum IndexResetType """ return self._get_enum_parameter(IndexResetType, "indexResetType") @index_reset_type.setter def index_reset_type(self, value): self._set_enum_parameter(IndexResetType, "indexResetType", value) @property def interest_calculation_method(self): """ The Day Count Basis method used to calculate the coupon interest payments. Mandatory. :return: enum DayCountBasis """ return self._get_enum_parameter(DayCountBasis, "interestCalculationMethod") @interest_calculation_method.setter def interest_calculation_method(self, value): self._set_enum_parameter(DayCountBasis, "interestCalculationMethod", value) @property def interest_payment_frequency(self): """ The frequency of the interest payments. Optional if an instrument code/style have been defined : in that case, value comes from reference data. Otherwise, it is mandatory. :return: enum Frequency """ return self._get_enum_parameter(Frequency, "interestPaymentFrequency") @interest_payment_frequency.setter def interest_payment_frequency(self, value): self._set_enum_parameter(Frequency, "interestPaymentFrequency", value) @property def payment_business_day_convention(self): """ The method to adjust dates to a working day. The possible values are: - ModifiedFollowing (adjusts dates according to the Modified Following convention - next business day unless is it goes into the next month, preceeding is used in that case), - NextBusinessDay (adjusts dates according to the Following convention - Next Business Day), - PreviousBusinessDay (adjusts dates according to the Preceeding convention - Previous Business Day), - NoMoving (does not adjust dates), - BbswModifiedFollowing (adjusts dates according to the BBSW Modified Following convention). Optional. In case an instrument code/style has been defined, value comes from bond reference data. Otherwise 'ModifiedFollowing' is used. 
:return: enum BusinessDayConvention """ return self._get_enum_parameter( BusinessDayConvention, "paymentBusinessDayConvention" ) @payment_business_day_convention.setter def payment_business_day_convention(self, value): self._set_enum_parameter( BusinessDayConvention, "paymentBusinessDayConvention", value ) @property def payment_roll_convention(self): """ The method to adjust payment dates whn they fall at the end of the month (28th of February, 30th, 31st). The possible values are: - Last (For setting the calculated date to the last working day), - Same (For setting the calculated date to the same day . In this latter case, the date may be moved according to the date moving convention if it is a non-working day), - Last28 (For setting the calculated date to the last working day. 28FEB being always considered as the last working day), - Same28 (For setting the calculated date to the same day .28FEB being always considered as the last working day). Optional. By default 'SameDay' is used. :return: enum DateRollingConvention """ return self._get_enum_parameter(DateRollingConvention, "paymentRollConvention") @payment_roll_convention.setter def payment_roll_convention(self, value): self._set_enum_parameter(DateRollingConvention, "paymentRollConvention", value) @property def cap_strike_percent(self): """ Cap leg strike expressed in % :return: float """ return self._get_parameter("capStrikePercent") @cap_strike_percent.setter def cap_strike_percent(self, value): self._set_parameter("capStrikePercent", value) @property def end_date(self): """ The maturity date of the CapFloor :return: str """ return self._get_parameter("endDate") @end_date.setter def end_date(self, value): self._set_parameter("endDate", value) @property def floor_strike_percent(self): """ Floor leg strike expressed in % :return: float """ return self._get_parameter("floorStrikePercent") @floor_strike_percent.setter def floor_strike_percent(self, value): self._set_parameter("floorStrikePercent", value) 
@property def index_fixing_lag(self): """ Defines the number of working days between the fixing date and the start of the coupon period ('InAdvance') or the end of the coupon period ('InArrears'). Optional. By default 0 is used. :return: int """ return self._get_parameter("indexFixingLag") @index_fixing_lag.setter def index_fixing_lag(self, value): self._set_parameter("indexFixingLag", value) @property def index_fixing_ric(self): """ The RIC that carries the fixing value. This value overrides the RIC associated by default with the IndexName and IndexTenor. Optional. :return: str """ return self._get_parameter("indexFixingRic") @index_fixing_ric.setter def index_fixing_ric(self, value): self._set_parameter("indexFixingRic", value) @property def index_name(self): """ The name of the floating rate index. :return: str """ return self._get_parameter("indexName") @index_name.setter def index_name(self, value): self._set_parameter("indexName", value) @property def index_tenor(self): """ The period code that represents the maturity of the floating rate index. Mandatory when the leg is float. :return: str """ return self._get_parameter("indexTenor") @index_tenor.setter def index_tenor(self, value): self._set_parameter("indexTenor", value) @property def instrument_tag(self): """ User defined string to identify the instrument.It can be used to link output results to the instrument definition. Only alphabetic, numeric and '- _.#=@' characters are supported. Optional. :return: str """ return self._get_parameter("instrumentTag") @instrument_tag.setter def instrument_tag(self, value): self._set_parameter("instrumentTag", value) @property def notional_amount(self): """ The notional amount of the leg at the period start date. Optional. By default 1,000,000 is used. 
:return: float """ return self._get_parameter("notionalAmount") @notional_amount.setter def notional_amount(self, value): self._set_parameter("notionalAmount", value) @property def notional_ccy(self): """ The ISO code of the notional currency. Mandatory if instrument code or instrument style has not been defined. In case an instrument code/style has been defined, value may comes from the reference data. :return: str """ return self._get_parameter("notionalCcy") @notional_ccy.setter def notional_ccy(self, value): self._set_parameter("notionalCcy", value) @property def start_date(self): """ The option start date :return: str """ return self._get_parameter("startDate") @start_date.setter def start_date(self, value): self._set_parameter("startDate", value) @property def tenor(self): """ Tenor of the option :return: str """ return self._get_parameter("tenor") @tenor.setter def tenor(self, value): self._set_parameter("tenor", value) @property def stub_rule(self): """ The rule that defines whether coupon roll dates are aligned on the maturity or the issue date. The possible values are: - ShortFirstProRata (to create a short period between the start date and the first coupon date, and pay a smaller amount of interest for the short period.All coupon dates are calculated backward from the maturity date), - ShortFirstFull (to create a short period between the start date and the first coupon date, and pay a regular coupon on the first coupon date. All coupon dates are calculated backward from the maturity date), - LongFirstFull (to create a long period between the start date and the second coupon date, and pay a regular coupon on the second coupon date. All coupon dates are calculated backward from the maturity date), - ShortLastProRata (to create a short period between the last payment date and maturity, and pay a smaller amount of interest for the short period. All coupon dates are calculated forward from the start date). 
This property may also be used in conjunction with firstRegularPaymentDate and lastRegularPaymentDate; in that case the following values can be defined: - Issue (all dates are aligned on the issue date), - Maturity (all dates are aligned on the maturity date). Optional. By default 'Maturity' is used. :return: enum StubRule """ return self._get_enum_parameter(StubRule, "stubRule") @stub_rule.setter def stub_rule(self, value): self._set_enum_parameter(StubRule, "stubRule", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/contracts/capfloor/_cap_floor_definition.py
0.88573
0.255059
_cap_floor_definition.py
pypi
__all__ = ["CalculationParams"] from ...instrument import InstrumentCalculationParams from ...enum_types.index_convexity_adjustment_integration_method import ( IndexConvexityAdjustmentIntegrationMethod, ) from ...enum_types.index_convexity_adjustment_method import ( IndexConvexityAdjustmentMethod, ) class CalculationParams(InstrumentCalculationParams): def __init__( self, index_convexity_adjustment_integration_method=None, index_convexity_adjustment_method=None, market_value_in_deal_ccy=None, report_ccy=None, skip_first_cap_floorlet=None, valuation_date=None, ): super().__init__() self.index_convexity_adjustment_integration_method = ( index_convexity_adjustment_integration_method ) self.index_convexity_adjustment_method = index_convexity_adjustment_method self.market_value_in_deal_ccy = market_value_in_deal_ccy self.report_ccy = report_ccy self.skip_first_cap_floorlet = skip_first_cap_floorlet self.valuation_date = valuation_date @property def index_convexity_adjustment_integration_method(self): """ :return: enum IndexConvexityAdjustmentIntegrationMethod """ return self._get_enum_parameter( IndexConvexityAdjustmentIntegrationMethod, "indexConvexityAdjustmentIntegrationMethod", ) @index_convexity_adjustment_integration_method.setter def index_convexity_adjustment_integration_method(self, value): self._set_enum_parameter( IndexConvexityAdjustmentIntegrationMethod, "indexConvexityAdjustmentIntegrationMethod", value, ) @property def index_convexity_adjustment_method(self): """ :return: enum IndexConvexityAdjustmentMethod """ return self._get_enum_parameter( IndexConvexityAdjustmentMethod, "indexConvexityAdjustmentMethod" ) @index_convexity_adjustment_method.setter def index_convexity_adjustment_method(self, value): self._set_enum_parameter( IndexConvexityAdjustmentMethod, "indexConvexityAdjustmentMethod", value ) @property def market_value_in_deal_ccy(self): """ MarketValueInDealCcy to override and that will be used as pricing analysis input to compute 
VolatilityPercent. Optional. No override is applied by default. Note that Premium takes priority over Volatility input. :return: float """ return self._get_parameter("marketValueInDealCcy") @market_value_in_deal_ccy.setter def market_value_in_deal_ccy(self, value): self._set_parameter("marketValueInDealCcy", value) @property def report_ccy(self): """ Valuation is performed in deal currency. If a report currency is set, valuation is done in that report currency. :return: str """ return self._get_parameter("reportCcy") @report_ccy.setter def report_ccy(self, value): self._set_parameter("reportCcy", value) @property def skip_first_cap_floorlet(self): """ Indicates whether to take in consideration the first caplet :return: bool """ return self._get_parameter("skipFirstCapFloorlet") @skip_first_cap_floorlet.setter def skip_first_cap_floorlet(self, value): self._set_parameter("skipFirstCapFloorlet", value) @property def valuation_date(self): """ The valuation date for pricing. Optional. If not set the valuation date is equal to MarketDataDate or Today. For assets that contains a settlementConvention, the default valuation date is equal to the settlementdate of the Asset that is usually the TradeDate+SettlementConvention. :return: str """ return self._get_parameter("valuationDate") @valuation_date.setter def valuation_date(self, value): self._set_parameter("valuationDate", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/contracts/capfloor/_cap_floor_pricing_parameters.py
0.91223
0.330188
_cap_floor_pricing_parameters.py
pypi
__all__ = ["Definition"] from ...instrument.instrument_definition import InstrumentDefinition from ...enum_types.day_count_basis import DayCountBasis from ._repo_underlying_contract import UnderlyingContract class Definition(InstrumentDefinition): def __init__( self, *, instrument_tag=None, underlying_instruments=None, start_date=None, end_date=None, tenor=None, is_coupon_exchanged=None, repo_rate_percent=None, day_count_basis=None, ): """ :param day_count_basis: DayCountBasis :param underlying_instruments: RepoUnderlyingContract :param end_date: str :param instrument_tag: str :param is_coupon_exchanged: bool :param repo_rate_percent: float :param start_date: str :param tenor: str """ super().__init__() self.instrument_tag = instrument_tag self.start_date = start_date self.end_date = end_date self.tenor = tenor self.day_count_basis = day_count_basis self.underlying_instruments = underlying_instruments self.is_coupon_exchanged = is_coupon_exchanged self.repo_rate_percent = repo_rate_percent @classmethod def get_instrument_type(cls): return "Repo" @property def day_count_basis(self): """ Day Count Basis convention to apply to the custom Repo rate. Optional, "Dcb_Actual_360" by default. :return: enum DayCountBasis """ return self._get_enum_parameter(DayCountBasis, "dayCountBasis") @day_count_basis.setter def day_count_basis(self, value): self._set_enum_parameter(DayCountBasis, "dayCountBasis", value) @property def underlying_instruments(self): """ Definition of the underlying instruments. Only Bond Contracts are supported for now, and only one Bond can be used. Mandatory. :return: list RepoUnderlyingContract """ return self._get_list_parameter(UnderlyingContract, "underlyingInstruments") @underlying_instruments.setter def underlying_instruments(self, value): self._set_list_parameter(UnderlyingContract, "underlyingInstruments", value) @property def end_date(self): """ End date of the repo, that means when the borrower repurchases the security back. 
Either EndDate or Tenor field are requested. :return: str """ return self._get_parameter("endDate") @end_date.setter def end_date(self, value): self._set_parameter("endDate", value) @property def instrument_tag(self): """ User defined string to identify the instrument.It can be used to link output results to the instrument definition. Only alphabetic, numeric and '- _.#=@' characters are supported. Optional. :return: str """ return self._get_parameter("instrumentTag") @instrument_tag.setter def instrument_tag(self, value): self._set_parameter("instrumentTag", value) @property def is_coupon_exchanged(self): """ Specifies whether or not intermediate coupons are exchanged. - CouponExchanged = True to specify that intermediate coupons for the underlying bond (between the repo start date and repo end date) are exchanged between the repo seller and repo buyer. - CouponExchanged = False to specify that no intermediate coupons are exchanged between the repo seller and repo buyer. In this case the repo instrument is like a standard loan with no intermediate coupons; the bond is only used as a warranty in case the money borrower defaults. Optional. True by default, which means coupon exchanged. :return: bool """ return self._get_parameter("isCouponExchanged") @is_coupon_exchanged.setter def is_coupon_exchanged(self, value): self._set_parameter("isCouponExchanged", value) @property def repo_rate_percent(self): """ Custom Repo Rate in percentage. If not provided in the request, it will be computed by interpolating/extrapolating a Repo Curve. Optional. :return: float """ return self._get_parameter("repoRatePercent") @repo_rate_percent.setter def repo_rate_percent(self, value): self._set_parameter("repoRatePercent", value) @property def start_date(self): """ Start date of the repo, that means when the the underlying security is exchanged. Mandatory. 
:return: str """ return self._get_parameter("startDate") @start_date.setter def start_date(self, value): self._set_parameter("startDate", value) @property def tenor(self): """ Tenor that defines the duration of the Repo in case no EndDate has been provided. In that case, EndDate is computed from StartDate and Tenor. Either EndDate or Tenor field are requested. :return: str """ return self._get_parameter("tenor") @tenor.setter def tenor(self, value): self._set_parameter("tenor", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/contracts/repo/_repo_definition.py
0.900857
0.296616
_repo_definition.py
pypi
__all__ = ["UnderlyingCalculationParams"] from ...instrument import InstrumentCalculationParams from ._repo_parameters import RepoParameters from ..bond import CalculationParams as BondPricingParameters class UnderlyingCalculationParams(InstrumentCalculationParams): def __init__( self, repo_parameters=None, pricing_parameters_at_end=None, pricing_parameters_at_start=None, valuation_date=None, ): super().__init__() self.pricing_parameters_at_end = pricing_parameters_at_end self.pricing_parameters_at_start = pricing_parameters_at_start self.repo_parameters = repo_parameters self.valuation_date = valuation_date @property def pricing_parameters_at_end(self): """ Pricing parameters of underlying bond at Repo end date. :return: object BondPricingParameters """ return self._get_object_parameter( BondPricingParameters, "pricingParametersAtEnd" ) @pricing_parameters_at_end.setter def pricing_parameters_at_end(self, value): self._set_object_parameter( BondPricingParameters, "pricingParametersAtEnd", value ) @property def pricing_parameters_at_start(self): """ Pricing parameters of underlying bond at Repo start date. :return: object BondPricingParameters """ return self._get_object_parameter( BondPricingParameters, "pricingParametersAtStart" ) @pricing_parameters_at_start.setter def pricing_parameters_at_start(self, value): self._set_object_parameter( BondPricingParameters, "pricingParametersAtStart", value ) @property def repo_parameters(self): """ Repo parameters to be applied on underlying bond. :return: object RepoParameters """ return self._get_object_parameter(RepoParameters, "repoParameters") @repo_parameters.setter def repo_parameters(self, value): self._set_object_parameter(RepoParameters, "repoParameters", value) @property def valuation_date(self): """ The valuation date for pricing. Optional. If not set the valuation date is equal to MarketDataDate or Today. 
For assets that contains a settlementConvention, the default valuation date is equal to the settlementdate of the Asset that is usually the TradeDate+SettlementConvention. :return: str """ return self._get_parameter("valuationDate") @valuation_date.setter def valuation_date(self, value): self._set_parameter("valuationDate", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/contracts/repo/_repo_underlying_pricing_parameters.py
0.884452
0.281325
_repo_underlying_pricing_parameters.py
pypi
__all__ = ["CalculationParams"] from ...instrument import InstrumentCalculationParams from ...enum_types.repo_curve_type import RepoCurveType class CalculationParams(InstrumentCalculationParams): def __init__( self, valuation_date=None, market_data_date=None, settlement_convention=None, report_ccy=None, coupon_reinvestment_rate_percent=None, repo_curve_type=None, ): super().__init__() self.valuation_date = valuation_date self.market_data_date = market_data_date self.settlement_convention = settlement_convention self.report_ccy = report_ccy self.coupon_reinvestment_rate_percent = coupon_reinvestment_rate_percent self.repo_curve_type = repo_curve_type @property def coupon_reinvestment_rate_percent(self): """ Rate used to reinvest the underlying asset's income. By default 0. :return: str """ return self._get_parameter("couponReinvestmentRatePercent") @coupon_reinvestment_rate_percent.setter def coupon_reinvestment_rate_percent(self, value): self._set_parameter("couponReinvestmentRatePercent", value) @property def settlement_convention(self): """ Settlement tenor for the repo. By default, the rule is that  repoStartDate = valuationDate = marketDataDate + settlementConvention. By default, the settlement convention is equal to the settlement convention of the underlying asset. :return: str """ return self._get_parameter("settlementConvention") @settlement_convention.setter def settlement_convention(self, value): self._set_parameter("settlementConvention", value) @property def report_ccy(self): """ Pricing data is computed in deal currency. If a report currency is set, pricing data is also computed in report currency. By default, Bond notional currency. :return: str """ return self._get_parameter("reportCcy") @report_ccy.setter def report_ccy(self, value): self._set_parameter("reportCcy", value) @property def market_data_date(self): """ The valuation date for pricing. The valuation date is the date where cash flow is discounted. 
By default, valuationDate is computed from marketDataDate and settlement convention. :return: datetime """ return self._get_parameter("marketDataDate") @market_data_date.setter def market_data_date(self, value): self._set_parameter("marketDataDate", value) @property def repo_curve_type(self): """ Curve used to compute the repo rate. it can be computed using following methods: - RepoCurve : rate is computed by interpolating a repo curve. - DepositCurve : rate is computed by interpolating a deposit curve. - FixingLibor : rate is computed by interpolating libor rates. If no curve can be found, the rate is computed using a deposit curve. :return: enum RepoCurveType """ return self._get_enum_parameter(RepoCurveType, "repoCurveType") @repo_curve_type.setter def repo_curve_type(self, value): self._set_enum_parameter(RepoCurveType, "repoCurveType", value) @property def valuation_date(self): """ The valuation date for pricing. Optional. If not set the valuation date is equal to MarketDataDate or Today. For assets that contains a settlementConvention, the default valuation date is equal to the settlementdate of the Asset that is usually the TradeDate+SettlementConvention. :return: str """ return self._get_parameter("valuationDate") @valuation_date.setter def valuation_date(self, value): self._set_parameter("valuationDate", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/contracts/repo/_repo_pricing_parameters.py
0.949165
0.281332
_repo_pricing_parameters.py
pypi
__all__ = ["Definition"] from ._bermudan_swaption_definition import BermudanSwaptionDefinition from ..swap import Definition as SwapDefinition from ...enum_types.buy_sell import BuySell from ...enum_types.call_put import CallPut from ...enum_types.exercise_style import ExerciseStyle from ...enum_types.swaption_settlement_type import SwaptionSettlementType from ...instrument.instrument_definition import InstrumentDefinition class Definition(InstrumentDefinition): def __init__( self, *, instrument_tag=None, end_date=None, tenor=None, bermudan_swaption_definition=None, buy_sell, call_put, exercise_style, settlement_type=None, underlying_definition, strike_percent=None, ): """ :param bermudan_swaption_definition: BermudanSwaptionDefinition :param buy_sell: BuySell :param call_put: CallPut :param exercise_style: ExerciseStyle :param settlement_type: SwaptionSettlementType :param underlying_definition: SwapDefinition :param end_date: str :param instrument_tag: str :param strike_percent: float :param tenor: str """ super().__init__() self.instrument_tag = instrument_tag self.end_date = end_date self.tenor = tenor self.bermudan_swaption_definition = bermudan_swaption_definition self.buy_sell = buy_sell self.call_put = call_put self.exercise_style = exercise_style self.settlement_type = settlement_type self.underlying_definition = underlying_definition self.strike_percent = strike_percent @classmethod def get_instrument_type(cls): return "Swaption" @property def bermudan_swaption_definition(self): """ :return: object BermudanSwaptionDefinition """ return self._get_object_parameter( BermudanSwaptionDefinition, "bermudanSwaptionDefinition" ) @bermudan_swaption_definition.setter def bermudan_swaption_definition(self, value): self._set_object_parameter( BermudanSwaptionDefinition, "bermudanSwaptionDefinition", value ) @property def buy_sell(self): """ The side of the deal. 
Possible values: - Buy - Sell :return: enum BuySell """ return self._get_enum_parameter(BuySell, "buySell") @buy_sell.setter def buy_sell(self, value): self._set_enum_parameter(BuySell, "buySell", value) @property def call_put(self): """ Tells if the option is a call or a put. Possible values: - Call - Put :return: enum CallPut """ return self._get_enum_parameter(CallPut, "callPut") @call_put.setter def call_put(self, value): self._set_enum_parameter(CallPut, "callPut", value) @property def exercise_style(self): """ EURO or BERM :return: enum ExerciseStyle """ return self._get_enum_parameter(ExerciseStyle, "exerciseStyle") @exercise_style.setter def exercise_style(self, value): self._set_enum_parameter(ExerciseStyle, "exerciseStyle", value) @property def settlement_type(self): """ The settlement type of the option if the option is exercised -Physical -Cash :return: enum SwaptionSettlementType """ return self._get_enum_parameter(SwaptionSettlementType, "settlementType") @settlement_type.setter def settlement_type(self, value): self._set_enum_parameter(SwaptionSettlementType, "settlementType", value) @property def underlying_definition(self): """ The details of the underlying swap :return: object SwapDefinition """ return self._get_object_parameter(SwapDefinition, "underlyingDefinition") @underlying_definition.setter def underlying_definition(self, value): self._set_object_parameter(SwapDefinition, "underlyingDefinition", value) @property def end_date(self): """ Expiry date of the option :return: str """ return self._get_parameter("endDate") @end_date.setter def end_date(self, value): self._set_parameter("endDate", value) @property def instrument_tag(self): """ User defined string to identify the instrument.It can be used to link output results to the instrument definition. Only alphabetic, numeric and '- _.#=@' characters are supported. Optional. 
:return: str """ return self._get_parameter("instrumentTag") @instrument_tag.setter def instrument_tag(self, value): self._set_parameter("instrumentTag", value) @property def strike_percent(self): """ StrikePercent of the option expressed in % format :return: float """ return self._get_parameter("strikePercent") @strike_percent.setter def strike_percent(self, value): self._set_parameter("strikePercent", value) @property def tenor(self): """ Tenor of the option :return: str """ return self._get_parameter("tenor") @tenor.setter def tenor(self, value): self._set_parameter("tenor", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/contracts/swaption/_swaption_definition.py
0.846831
0.204958
_swaption_definition.py
pypi
__all__ = ["CalculationParams"] from . import SwaptionMarketDataRule from ...enum_types.discounting_type import DiscountingType from ...instrument import InstrumentCalculationParams class CalculationParams(InstrumentCalculationParams): def __init__( self, market_data_rule=None, market_value_in_deal_ccy=None, nb_iterations=None, valuation_date=None, ): super().__init__() self.market_data_rule = market_data_rule self.market_value_in_deal_ccy = market_value_in_deal_ccy self.nb_iterations = nb_iterations self.valuation_date = valuation_date @property def market_data_rule(self): """ :return: object SwaptionMarketDataRule """ return self._get_object_parameter(SwaptionMarketDataRule, "marketDataRule") @market_data_rule.setter def market_data_rule(self, value): self._set_object_parameter(SwaptionMarketDataRule, "marketDataRule", value) @property def market_value_in_deal_ccy(self): """ MarketValueInDealCcy to override and that will be used as pricing analysis input to compute VolatilityPercent. Optional. No override is applied by default. Note that Premium takes priority over Volatility input. :return: float """ return self._get_parameter("marketValueInDealCcy") @market_value_in_deal_ccy.setter def market_value_in_deal_ccy(self, value): self._set_parameter("marketValueInDealCcy", value) @property def nb_iterations(self): """ Used for Bermudans and HW1F tree :return: int """ return self._get_parameter("nbIterations") @nb_iterations.setter def nb_iterations(self, value): self._set_parameter("nbIterations", value) @property def valuation_date(self): """ The valuation date for pricing. Optional. If not set the valuation date is equal to MarketDataDate or Today. For assets that contains a settlementConvention, the default valuation date is equal to the settlementdate of the Asset that is usually the TradeDate+SettlementConvention. 
:return: str """ return self._get_parameter("valuationDate") @valuation_date.setter def valuation_date(self, value): self._set_parameter("valuationDate", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/contracts/swaption/_swaption_pricing_parameters.py
0.872944
0.22482
_swaption_pricing_parameters.py
pypi
__all__ = ["Definition"] from refinitiv.dataplatform.tools._common import is_any_defined from refinitiv.dataplatform import RequiredError from ...instrument.instrument_definition import InstrumentDefinition from ._swap_leg_definition import LegDefinition class Definition(InstrumentDefinition): def __init__( self, *, instrument_tag=None, trade_date=None, start_date=None, end_date=None, tenor=None, settlement_ccy=None, is_non_deliverable=None, instrument_code=None, template=None, legs=None, ): super().__init__() error = [] if not is_any_defined(end_date, tenor): error.append("Either the end_date or the tenor must be provided.") if not instrument_code: error.append( "If instrument_code is None, template or legs field must be defined." ) is_any_defined(template, legs) and error.pop() if not template: error.append( "If template is None, instrument_code or legs field must be defined." ) is_any_defined(instrument_code, legs) and error.pop() if not legs: error.append( "If legs is None, the parameter template or instrument_code must be defined." ) is_any_defined(template, instrument_code) and error.pop() if error: raise RequiredError(-1, error) self.instrument_code = instrument_code self.instrument_tag = instrument_tag self.trade_date = trade_date self.start_date = start_date self.end_date = end_date self.legs = legs self.is_non_deliverable = is_non_deliverable self.settlement_ccy = settlement_ccy self.template = template self.tenor = tenor @classmethod def get_instrument_type(cls): return "Swap" @property def legs(self): """ The legs of the Swap to provide a full definition of the swap if no template or instrumentCode have been provided. Optional. Either InstrumentCode, Template, or Legs must be provided. :return: list SwapLegDefinition """ return self._get_list_parameter(LegDefinition, "legs") @legs.setter def legs(self, value): self._set_list_parameter(LegDefinition, "legs", value) @property def end_date(self): """ The maturity date of the swap contract. Mandatory. 
Either the endDate or the tenor must be provided. :return: str """ return self._get_parameter("endDate") @end_date.setter def end_date(self, value): self._set_parameter("endDate", value) @property def instrument_code(self): """ A swap RIC that is used to retrieve the description of the swap contract. Optional. Either instrumentCode, template, or legs must be provided. :return: str """ return self._get_parameter("instrumentCode") @instrument_code.setter def instrument_code(self, value): self._set_parameter("instrumentCode", value) @property def is_non_deliverable(self): """ A flag that indicates if the swap is non-deliverable. Optional. By defaults 'false'. :return: bool """ return self._get_parameter("isNonDeliverable") @is_non_deliverable.setter def is_non_deliverable(self, value): self._set_parameter("isNonDeliverable", value) @property def settlement_ccy(self): """ For non-deliverable instrument, the ISO code of the settlement currency. Optional. By priority order : 'USD' if one leg denominated in USD; 'EUR' if one leg is denominated in EUR; the paidLegCcy. :return: str """ return self._get_parameter("settlementCcy") @settlement_ccy.setter def settlement_ccy(self, value): self._set_parameter("settlementCcy", value) @property def start_date(self): """ The date the swap starts accruing interest. Its effective date. Optional. By default it is derived from the TradeDate and the day to spot convention of the contract currency. :return: str """ return self._get_parameter("startDate") @start_date.setter def start_date(self, value): self._set_parameter("startDate", value) @property def template(self): """ A reference to a common swap contract. Optional. Either InstrumentCode, Template, or Legs must be provided. :return: str """ return self._get_parameter("template") @template.setter def template(self, value): self._set_parameter("template", value) @property def tenor(self): """ The period code that represents the time between the start date and end date the contract. 
Mandatory. Either the endDate or the tenor must be provided. :return: str """ return self._get_parameter("tenor") @tenor.setter def tenor(self, value): self._set_parameter("tenor", value) @property def trade_date(self): """ The date the swap contract was created. Optional. By default the valuation date. :return: str """ return self._get_parameter("tradeDate") @trade_date.setter def trade_date(self, value): self._set_parameter("tradeDate", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/contracts/swap/_swap_definition.py
0.788543
0.241601
_swap_definition.py
pypi
__all__ = ["LegDefinition"] from refinitiv.dataplatform import RequiredError from refinitiv.dataplatform.tools._common import is_any_defined from ...enum_types.adjust_interest_to_payment_date import AdjustInterestToPaymentDate from ...enum_types.day_count_basis import DayCountBasis from ...enum_types.common_tools import is_enum_equal from ...enum_types.business_day_convention import BusinessDayConvention from ...enum_types.direction import Direction from ...enum_types.frequency import Frequency from ...enum_types.index_compounding_method import IndexCompoundingMethod from ...enum_types.index_reset_type import IndexResetType from ...enum_types.interest_type import InterestType from ...enum_types.notional_exchange import NotionalExchange from ...enum_types.date_rolling_convention import DateRollingConvention from ...enum_types.stub_rule import StubRule from ...instrument._definition import ObjectDefinition from ...models import AmortizationItem class LegDefinition(ObjectDefinition): def __init__( self, *, leg_tag=None, direction, interest_type, notional_ccy, notional_amount=None, fixed_rate_percent=None, index_name=None, index_tenor=None, index_fixing_ric=None, spread_bp=None, interest_payment_frequency, interest_calculation_method, accrued_calculation_method=None, payment_business_day_convention=None, payment_roll_convention=None, index_reset_frequency=None, index_reset_type=None, index_fixing_lag=None, first_regular_payment_date=None, last_regular_payment_date=None, amortization_schedule=None, payment_business_days=None, notional_exchange=None, adjust_interest_to_payment_date=None, index_compounding_method=None, interest_payment_delay=None, stub_rule=None, ): super().__init__() error = [] if is_enum_equal(InterestType.FLOAT, interest_type): error.append("index_name required, if the leg is float.") is_any_defined(index_name) and error.pop() if error: raise RequiredError(-1, error) self.leg_tag = leg_tag self.direction = direction self.interest_type = interest_type 
self.notional_ccy = notional_ccy self.notional_amount = notional_amount self.fixed_rate_percent = fixed_rate_percent self.index_name = index_name self.index_tenor = index_tenor self.spread_bp = spread_bp self.interest_payment_frequency = interest_payment_frequency self.interest_calculation_method = interest_calculation_method self.accrued_calculation_method = accrued_calculation_method self.payment_business_day_convention = payment_business_day_convention self.payment_roll_convention = payment_roll_convention self.index_reset_frequency = index_reset_frequency self.index_reset_type = index_reset_type self.index_fixing_lag = index_fixing_lag self.first_regular_payment_date = first_regular_payment_date self.last_regular_payment_date = last_regular_payment_date self.amortization_schedule = amortization_schedule self.payment_business_days = payment_business_days self.notional_exchange = notional_exchange self.adjust_interest_to_payment_date = adjust_interest_to_payment_date self.index_compounding_method = index_compounding_method self.interest_payment_delay = interest_payment_delay self.stub_rule = stub_rule self.index_fixing_ric = index_fixing_ric # self.fixed_rate_percent_schedule = fixed_rate_percent_schedule @property def accrued_calculation_method(self): """ The Day Count Basis method used to calculate the accrued interest payments. Optional. By default, the same value than InterestCalculationMethod is used. :return: enum DayCountBasis """ return self._get_enum_parameter(DayCountBasis, "accruedCalculationMethod") @accrued_calculation_method.setter def accrued_calculation_method(self, value): self._set_enum_parameter(DayCountBasis, "accruedCalculationMethod", value) @property def adjust_interest_to_payment_date(self): """ A flag that indicates if the coupon dates are adjusted to the payment dates. Optional. By default 'false' is used. 
:return: enum AdjustInterestToPaymentDate """ return self._get_enum_parameter( AdjustInterestToPaymentDate, "adjustInterestToPaymentDate" ) @adjust_interest_to_payment_date.setter def adjust_interest_to_payment_date(self, value): self._set_enum_parameter( AdjustInterestToPaymentDate, "adjustInterestToPaymentDate", value ) @property def amortization_schedule(self): """ Definition of amortizations :return: list AmortizationItem """ return self._get_list_parameter(AmortizationItem, "amortizationSchedule") @amortization_schedule.setter def amortization_schedule(self, value): self._set_list_parameter(AmortizationItem, "amortizationSchedule", value) @property def direction(self): """ The direction of the leg. the possible values are: 'Paid' (the cash flows of the leg are paid to the counterparty), 'Received' (the cash flows of the leg are received from the counterparty). Optional for a single leg instrument (like a bond), in that case default value is Received. It is mandatory for a multi-instrument leg instrument (like Swap or CDS leg). :return: enum Direction """ return self._get_enum_parameter(Direction, "direction") @direction.setter def direction(self, value): self._set_enum_parameter(Direction, "direction", value) @property def index_compounding_method(self): """ A flag that defines how the coupon rate is calculated from the reset floating rates when the reset frequency is higher than the interest payment frequency (e.g. daily index reset with quarterly interest payment). The possible values are: - Compounded (uses the compounded average rate from multiple fixings), - Average (uses the arithmetic average rate from multiple fixings), - Constant (uses the last published rate among multiple fixings), - AdjustedCompounded (uses Chinese 7-day repo fixing), - MexicanCompounded (uses Mexican Bremse fixing). Optional. By default 'Constant' is used. 
:return: enum IndexCompoundingMethod """ return self._get_enum_parameter( IndexCompoundingMethod, "indexCompoundingMethod" ) @index_compounding_method.setter def index_compounding_method(self, value): self._set_enum_parameter( IndexCompoundingMethod, "indexCompoundingMethod", value ) @property def index_reset_frequency(self): """ The reset frequency in case the leg Type is Float. Optional. By default the IndexTenor is used. :return: enum Frequency """ return self._get_enum_parameter(Frequency, "indexResetFrequency") @index_reset_frequency.setter def index_reset_frequency(self, value): self._set_enum_parameter(Frequency, "indexResetFrequency", value) @property def index_reset_type(self): """ A flag that indicates if the floating rate index is reset before the coupon period starts or at the end of the coupon period. The possible values are: - InAdvance (resets the index before the start of the interest period), - InArrears (resets the index at the end of the interest period). Optional. By default 'InAdvance' is used. :return: enum IndexResetType """ return self._get_enum_parameter(IndexResetType, "indexResetType") @index_reset_type.setter def index_reset_type(self, value): self._set_enum_parameter(IndexResetType, "indexResetType", value) @property def interest_calculation_method(self): """ The Day Count Basis method used to calculate the coupon interest payments. Mandatory. :return: enum DayCountBasis """ return self._get_enum_parameter(DayCountBasis, "interestCalculationMethod") @interest_calculation_method.setter def interest_calculation_method(self, value): self._set_enum_parameter(DayCountBasis, "interestCalculationMethod", value) @property def interest_payment_frequency(self): """ The frequency of the interest payments. Optional if an instrument code/style have been defined : in that case, value comes from reference data. Otherwise, it is mandatory. 
:return: enum Frequency """ return self._get_enum_parameter(Frequency, "interestPaymentFrequency") @interest_payment_frequency.setter def interest_payment_frequency(self, value): self._set_enum_parameter(Frequency, "interestPaymentFrequency", value) @property def interest_type(self): """ A flag that indicates whether the leg is fixed or float. Possible values are: - 'Fixed' (the leg has a fixed coupon), - 'Float' (the leg has a floating rate index). Mandatory. :return: enum InterestType """ return self._get_enum_parameter(InterestType, "interestType") @interest_type.setter def interest_type(self, value): self._set_enum_parameter(InterestType, "interestType", value) @property def notional_exchange(self): """ A flag that defines whether and when notional payments occurs. The possible values are: - None (means that the notional is not included in the cash flow schedule), - Start (means that the counterparties exchange the notional on the swap start date, before the first interest payment), - End (means that the counterparties exchange the notional on the swap maturity date, in addition to the last interest payment), - Both (combines the payments of Start Only and End Only), - EndAdjustment. :return: enum NotionalExchange """ return self._get_enum_parameter(NotionalExchange, "notionalExchange") @notional_exchange.setter def notional_exchange(self, value): self._set_enum_parameter(NotionalExchange, "notionalExchange", value) @property def payment_business_day_convention(self): """ The method to adjust dates to a working day. 
The possible values are: - ModifiedFollowing (adjusts dates according to the Modified Following convention - next business day unless is it goes into the next month, preceeding is used in that case), - NextBusinessDay (adjusts dates according to the Following convention - Next Business Day), - PreviousBusinessDay (adjusts dates according to the Preceeding convention - Previous Business Day), - NoMoving (does not adjust dates), - BbswModifiedFollowing (adjusts dates according to the BBSW Modified Following convention). Optional. In case an instrument code/style has been defined, value comes from bond reference data. Otherwise 'ModifiedFollowing' is used. :return: enum BusinessDayConvention """ return self._get_enum_parameter( BusinessDayConvention, "paymentBusinessDayConvention" ) @payment_business_day_convention.setter def payment_business_day_convention(self, value): self._set_enum_parameter( BusinessDayConvention, "paymentBusinessDayConvention", value ) @property def payment_roll_convention(self): """ The method to adjust payment dates whn they fall at the end of the month (28th of February, 30th, 31st). The possible values are: - Last (For setting the calculated date to the last working day), - Same (For setting the calculated date to the same day . In this latter case, the date may be moved according to the date moving convention if it is a non-working day), - Last28 (For setting the calculated date to the last working day. 28FEB being always considered as the last working day), - Same28 (For setting the calculated date to the same day .28FEB being always considered as the last working day). Optional. By default 'SameDay' is used. 
:return: enum DateRollingConvention """ return self._get_enum_parameter(DateRollingConvention, "paymentRollConvention") @payment_roll_convention.setter def payment_roll_convention(self, value): self._set_enum_parameter(DateRollingConvention, "paymentRollConvention", value) @property def stub_rule(self): """ The rule that defines whether coupon roll dates are aligned on the maturity or the issue date. The possible values are: - ShortFirstProRata (to create a short period between the start date and the first coupon date, and pay a smaller amount of interest for the short period.All coupon dates are calculated backward from the maturity date), - ShortFirstFull (to create a short period between the start date and the first coupon date, and pay a regular coupon on the first coupon date. All coupon dates are calculated backward from the maturity date), - LongFirstFull (to create a long period between the start date and the second coupon date, and pay a regular coupon on the second coupon date. All coupon dates are calculated backward from the maturity date), - ShortLastProRata (to create a short period between the last payment date and maturity, and pay a smaller amount of interest for the short period. All coupon dates are calculated forward from the start date). This property may also be used in conjunction with firstRegularPaymentDate and lastRegularPaymentDate; in that case the following values can be defined: - Issue (all dates are aligned on the issue date), - Maturity (all dates are aligned on the maturity date). Optional. By default 'Maturity' is used. :return: enum StubRule """ return self._get_enum_parameter(StubRule, "stubRule") @stub_rule.setter def stub_rule(self, value): self._set_enum_parameter(StubRule, "stubRule", value) @property def first_regular_payment_date(self): """ The first regular coupon payment date for leg with an odd first coupon. Optional. 
:return: str """ return self._get_parameter("firstRegularPaymentDate") @first_regular_payment_date.setter def first_regular_payment_date(self, value): self._set_parameter("firstRegularPaymentDate", value) @property def fixed_rate_percent(self): """ The fixed coupon rate in percentage. It is mandatory in case of a single leg instrument. Otherwise, in case of multi leg instrument, it can be computed as the Par rate. :return: float """ return self._get_parameter("fixedRatePercent") @fixed_rate_percent.setter def fixed_rate_percent(self, value): self._set_parameter("fixedRatePercent", value) @property def index_fixing_lag(self): """ Defines the number of working days between the fixing date and the start of the coupon period ('InAdvance') or the end of the coupon period ('InArrears'). Optional. By default 0 is used. :return: int """ return self._get_parameter("indexFixingLag") @index_fixing_lag.setter def index_fixing_lag(self, value): self._set_parameter("indexFixingLag", value) @property def index_fixing_ric(self): """ The RIC that carries the fixing value. This value overrides the RIC associated by default with the IndexName and IndexTenor. Optional. :return: str """ return self._get_parameter("indexFixingRic") @index_fixing_ric.setter def index_fixing_ric(self, value): self._set_parameter("indexFixingRic", value) @property def index_name(self): """ The name of the floating rate index. Mandatory when the leg is float. :return: str """ return self._get_parameter("indexName") @index_name.setter def index_name(self, value): self._set_parameter("indexName", value) @property def index_tenor(self): """ The period code that represents the maturity of the floating rate index. Mandatory when the leg is float. 
:return: str """ return self._get_parameter("indexTenor") @index_tenor.setter def index_tenor(self, value): self._set_parameter("indexTenor", value) @property def interest_payment_delay(self): """ The number of working days between the end of coupon period and the actual interest payment date. Optional. By default no delay (0) is applied. :return: int """ return self._get_parameter("interestPaymentDelay") @interest_payment_delay.setter def interest_payment_delay(self, value): self._set_parameter("interestPaymentDelay", value) @property def last_regular_payment_date(self): """ The last regular coupon payment date for leg with an odd last coupon. Optional. :return: str """ return self._get_parameter("lastRegularPaymentDate") @last_regular_payment_date.setter def last_regular_payment_date(self, value): self._set_parameter("lastRegularPaymentDate", value) @property def leg_tag(self): """ A user provided string to identify the leg that will also be part of the response. Optional. :return: str """ return self._get_parameter("legTag") @leg_tag.setter def leg_tag(self, value): self._set_parameter("legTag", value) @property def notional_amount(self): """ The notional amount of the leg at the period start date. Optional. By default 1,000,000 is used. :return: float """ return self._get_parameter("notionalAmount") @notional_amount.setter def notional_amount(self, value): self._set_parameter("notionalAmount", value) @property def notional_ccy(self): """ The ISO code of the notional currency. Mandatory if instrument code or instrument style has not been defined. In case an instrument code/style has been defined, value may comes from the reference data. :return: str """ return self._get_parameter("notionalCcy") @notional_ccy.setter def notional_ccy(self, value): self._set_parameter("notionalCcy", value) @property def payment_business_days(self): """ A list of coma-separated calendar codes to adjust dates (e.g. 'EMU' or 'USA'). Optional. 
By default the calendar associated to NotionalCcy is used. :return: str """ return self._get_parameter("paymentBusinessDays") @payment_business_days.setter def payment_business_days(self, value): self._set_parameter("paymentBusinessDays", value) @property def spread_bp(self): """ The spread in basis point that is added to the floating rate index index value. Optional. By default 0 is used. :return: float """ return self._get_parameter("spreadBp") @spread_bp.setter def spread_bp(self, value): self._set_parameter("spreadBp", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/contracts/swap/_swap_leg_definition.py
0.812756
0.270095
_swap_leg_definition.py
pypi
__all__ = ["CalculationParams"] from ...instrument import InstrumentCalculationParams from ...enum_types.index_convexity_adjustment_method import ( IndexConvexityAdjustmentMethod, ) from ...enum_types.index_convexity_adjustment_integration_method import ( IndexConvexityAdjustmentIntegrationMethod, ) class CalculationParams(InstrumentCalculationParams): def __init__( self, valuation_date=None, report_ccy=None, market_data_date=None, index_convexity_adjustment_integration_method=None, index_convexity_adjustment_method=None, discounting_ccy=None, discounting_tenor=None, market_value_in_deal_ccy=None, ): super().__init__() self.valuation_date = valuation_date self.report_ccy = report_ccy self.market_data_date = market_data_date self.market_value_in_deal_ccy = market_value_in_deal_ccy self.index_convexity_adjustment_integration_method = ( index_convexity_adjustment_integration_method ) self.index_convexity_adjustment_method = index_convexity_adjustment_method self.discounting_ccy = discounting_ccy self.discounting_tenor = discounting_tenor @property def valuation_date(self): """ The valuation date for pricing. Optional. If not set the valuation date is equal to MarketDataDate or Today. For assets that contains a settlementConvention, the default valuation date is equal to the settlementdate of the Asset that is usually the TradeDate+SettlementConvention. 
:return: str """ return self._get_parameter("valuationDate") @valuation_date.setter def valuation_date(self, value): self._set_parameter("valuationDate", value) @property def report_ccy(self): """ :return: str """ return self._get_parameter("reportCcy") @report_ccy.setter def report_ccy(self, value): self._set_parameter("reportCcy", value) @property def market_data_date(self): """ :return: str """ return self._get_parameter("marketDataDate") @market_data_date.setter def market_data_date(self, value): self._set_parameter("marketDataDate", value) @property def market_value_in_deal_ccy(self): """ :return: float """ return self._get_parameter("marketValueInDealCcy") @market_value_in_deal_ccy.setter def market_value_in_deal_ccy(self, value): self._set_parameter("marketValueInDealCcy", value) @property def discounting_tenor(self): """ :return: str """ return self._get_parameter("discountingTenor") @discounting_tenor.setter def discounting_tenor(self, value): self._set_parameter("discountingTenor", value) @property def discounting_ccy(self): """ :return: str """ return self._get_parameter("discountingCcy") @discounting_ccy.setter def discounting_ccy(self, value): self._set_parameter("discountingCcy", value) @property def index_convexity_adjustment_integration_method(self): """ Integration method used for static replication method. :return: enum IndexConvexityAdjustmentIntegrationMethod """ return self._get_enum_parameter( IndexConvexityAdjustmentIntegrationMethod, "indexConvexityAdjustmentIntegrationMethod", ) @index_convexity_adjustment_integration_method.setter def index_convexity_adjustment_integration_method(self, value): self._set_enum_parameter( IndexConvexityAdjustmentIntegrationMethod, "indexConvexityAdjustmentIntegrationMethod", value, ) @property def index_convexity_adjustment_method(self): """ Convexity adjustment type for CMS swaps and Libor in arrears swaps. 
Values can be: None, BlackScholes, LiborSwapMethod, or Replication :return: enum IndexConvexityAdjustmentMethod """ return self._get_enum_parameter( IndexConvexityAdjustmentMethod, "indexConvexityAdjustmentMethod" ) @index_convexity_adjustment_method.setter def index_convexity_adjustment_method(self, value): self._set_enum_parameter( IndexConvexityAdjustmentMethod, "indexConvexityAdjustmentMethod", value )
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/contracts/swap/_swap_pricing_parameters.py
0.923017
0.267713
_swap_pricing_parameters.py
pypi
__all__ = ["Definition"] from ...instrument.instrument_definition import InstrumentDefinition from ...enum_types.fx_cross_type import FxCrossType from ._fx_cross_leg_definition import LegDefinition class Definition(InstrumentDefinition): def __init__( self, *, instrument_tag=None, fx_cross_code=None, fx_cross_type=None, traded_cross_rate=None, traded_swap_points=None, reference_spot_rate=None, reference_swap_points=None, ndf_fixing_settlement_ccy=None, legs=None, ): super().__init__() self.instrument_tag = instrument_tag self.fx_cross_code = fx_cross_code self.fx_cross_type = fx_cross_type self.traded_cross_rate = traded_cross_rate self.traded_swap_points = traded_swap_points self.reference_spot_rate = reference_spot_rate self.reference_swap_points = reference_swap_points self.ndf_fixing_settlement_ccy = ndf_fixing_settlement_ccy self.legs = legs @classmethod def get_instrument_type(cls): return "FxCross" @property def fx_cross_type(self): """ The type of the Fx Cross instrument : 'FxSpot', 'FxForward', 'FxNonDeliverableForward', 'FxSwap', 'MultiLeg' or 'FxForwardForward'. Mandatory. :return: enum FxCrossType """ return self._get_enum_parameter(FxCrossType, "fxCrossType") @fx_cross_type.setter def fx_cross_type(self, value): self._set_enum_parameter(FxCrossType, "fxCrossType", value) @property def legs(self): """ Extra parameters to describe further the contract. 1 leg is mandatory for Forwards and NDFs contracts. 2 legs are required for Swaps, and FwdFwdSwaps contracts. Optional for Spot contracts. :return: list FxCrossLegDefinition """ return self._get_list_parameter(LegDefinition, "legs") @legs.setter def legs(self, value): self._set_list_parameter(LegDefinition, "legs", value) @property def fx_cross_code(self): """ The ISO code of the cross currency (e.g. 'EURCHF'). Mandatory. 
:return: str """ return self._get_parameter("fxCrossCode") @fx_cross_code.setter def fx_cross_code(self, value): self._set_parameter("fxCrossCode", value) @property def instrument_tag(self): """ User defined string to identify the instrument.It can be used to link output results to the instrument definition. Only alphabetic, numeric and '- _.#=@' characters are supported. Optional. :return: str """ return self._get_parameter("instrumentTag") @instrument_tag.setter def instrument_tag(self, value): self._set_parameter("instrumentTag", value) @property def ndf_fixing_settlement_ccy(self): """ In case of a NDF contract, the ISO code of the settlement currency (e.g. 'EUR' ). Optional. :return: str """ return self._get_parameter("ndfFixingSettlementCcy") @ndf_fixing_settlement_ccy.setter def ndf_fixing_settlement_ccy(self, value): self._set_parameter("ndfFixingSettlementCcy", value) @property def reference_spot_rate(self): """ Contractual Spot Rate the counterparties agreed. It is used to compute the TradedCrossRate as 'ReferenceSpotRate + TradedSwapPoints / FxSwapPointScalingFactor'. In the case of a "FxSwap" contract, it is also used to compute nearLeg.ContraAmount from nearLeg.DealAmount as 'nearLeg.ContraAmount = nearLeg.DealAmount * (ReferenceSpotRate / FxCrossScalingFactor)'. Optional. Default value is null. In that case TradedCrossRate and Leg ContraAmount may not be computed. :return: float """ return self._get_parameter("referenceSpotRate") @reference_spot_rate.setter def reference_spot_rate(self, value): self._set_parameter("referenceSpotRate", value) @property def traded_cross_rate(self): """ The contractual exchange rate agreed by the two counterparties. It is used to compute the ContraAmount if the amount is not filled. In the case of a 'FxForward' and 'FxNonDeliverableForward' contract : ContraAmount is computed as 'DealAmount x TradedCrossRate / FxCrossScalingFactor'. 
In the case of a 'FxSwap' contract : farLeg.ContraAmount is computed as 'nearLeg.DealAmount x TradedCrossRate / FxCrossScalingFactor'. Optional. Default value is null. It emans that if both ContraAmount and TradedCrossRate are sot set, market value cannot be computed. :return: float """ return self._get_parameter("tradedCrossRate") @traded_cross_rate.setter def traded_cross_rate(self, value): self._set_parameter("tradedCrossRate", value) @property def traded_swap_points(self): """ Contractual forward points agreed by the two counterparties. It is used to compute the TradedCrossRate as 'ReferenceSpotRate + TradedSwapPoints / FxSwapPointScalingFactor'. Optional. Default value is null. In that case TradedCrossRate and Leg ContraAmount may not be computed. :return: float """ return self._get_parameter("tradedSwapPoints") @traded_swap_points.setter def traded_swap_points(self, value): self._set_parameter("tradedSwapPoints", value) @property def reference_swap_points(self): """ This is the contractual swap points the counterparties agreed to use to calculate the outright, in case of a Forward/Forward contract. :return: float """ return self._get_parameter("referenceSwapPoints") @reference_swap_points.setter def reference_swap_points(self, value): self._set_parameter("referenceSwapPoints", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/contracts/cross/_fx_cross_definition.py
0.920101
0.327023
_fx_cross_definition.py
pypi
__all__ = ["Definition"] from ...instrument.instrument_definition import InstrumentDefinition from ...enum_types.day_count_basis import DayCountBasis from ...enum_types.business_day_convention import BusinessDayConvention from ...enum_types.date_rolling_convention import DateRollingConvention class Definition(InstrumentDefinition): def __init__( self, *, instrument_tag=None, instrument_code=None, start_date=None, end_date=None, tenor, notional_ccy, notional_amount=None, fixed_rate_percent=None, payment_business_day_convention=None, payment_roll_convention=None, year_basis=None, calendar=None, ): super().__init__() self.instrument_tag = instrument_tag self.instrument_code = instrument_code self.start_date = start_date self.end_date = end_date self.tenor = tenor self.notional_ccy = notional_ccy self.notional_amount = notional_amount self.fixed_rate_percent = fixed_rate_percent self.payment_business_day_convention = payment_business_day_convention self.payment_roll_convention = payment_roll_convention self.year_basis = year_basis self.calendar = calendar @classmethod def get_instrument_type(cls): return "TermDeposit" @property def payment_business_day_convention(self): """ The method to adjust dates to a working day. The possible values are: - ModifiedFollowing (adjusts dates according to the Modified Following convention - next business day unless is it goes into the next month, preceeding is used in that case), - NextBusinessDay (adjusts dates according to the Following convention - Next Business Day), - PreviousBusinessDay (adjusts dates according to the Preceeding convention - Previous Business Day), - NoMoving (does not adjust dates), - BbswModifiedFollowing (adjusts dates according to the BBSW Modified Following convention). Optional. In case an instrument code/style has been defined, value comes from bond reference data. Otherwise 'ModifiedFollowing' is used. 
:return: enum BusinessDayConvention """ return self._get_enum_parameter( BusinessDayConvention, "paymentBusinessDayConvention" ) @payment_business_day_convention.setter def payment_business_day_convention(self, value): self._set_enum_parameter( BusinessDayConvention, "paymentBusinessDayConvention", value ) @property def payment_roll_convention(self): """ Method to adjust payment dates when they fall at the end of the month (28th of February, 30th, 31st). The possible values are: - Last (For setting the calculated date to the last working day), - Same (For setting the calculated date to the same day . In this latter case, the date may be moved according to the date moving convention if it is a non-working day), - Last28 (For setting the calculated date to the last working day. 28FEB being always considered as the last working day), - Same28 (For setting the calculated date to the same day .28FEB being always considered as the last working day). Optional. In case an instrument code has been defined, value comes from bond reference data. Otherwise, 'SameDay' is used. :return: enum DateRollingConvention """ return self._get_enum_parameter(DateRollingConvention, "paymentRollConvention") @payment_roll_convention.setter def payment_roll_convention(self, value): self._set_enum_parameter(DateRollingConvention, "paymentRollConvention", value) @property def year_basis(self): """ The Day Count Basis method used to calculate the interest payments. Dcb_Actual_365 used by default. :return: enum DayCountBasis """ return self._get_enum_parameter(DayCountBasis, "yearBasis") @year_basis.setter def year_basis(self, value): self._set_enum_parameter(DayCountBasis, "yearBasis", value) @property def calendar(self): """ Calendar used to adjust deposit duration calculation. By default the calendar corresponding to notional currency is used. 
:return: str """ return self._get_parameter("calendar") @calendar.setter def calendar(self, value): self._set_parameter("calendar", value) @property def end_date(self): """ The maturity date of the term deposit contract. Mandatory. Either the endDate or the tenor must be provided. :return: str """ return self._get_parameter("endDate") @end_date.setter def end_date(self, value): self._set_parameter("endDate", value) @property def fixed_rate_percent(self): """ Fixed interest rate percent to be applied for notional by deal terms. E.g. "10" means 10% :return: float """ return self._get_parameter("fixedRatePercent") @fixed_rate_percent.setter def fixed_rate_percent(self, value): self._set_parameter("fixedRatePercent", value) @property def instrument_code(self): """ Code to define the term deposit instrument. For the moment, only RICs for CDs and Wholesales deposits are supported, with deposit code (ex:"EUR1MD="). :return: str """ return self._get_parameter("instrumentCode") @instrument_code.setter def instrument_code(self, value): self._set_parameter("instrumentCode", value) @property def instrument_tag(self): """ User defined string to identify the instrument. It can be used to link output results to the instrument definition. Only alphabetic, numeric and '- _.#=@' characters are supported. Optional. :return: str """ return self._get_parameter("instrumentTag") @instrument_tag.setter def instrument_tag(self, value): self._set_parameter("instrumentTag", value) @property def notional_amount(self): """ The notional amount of the term deposit at the start date. Optional. By default 1,000,000 is used. :return: float """ return self._get_parameter("notionalAmount") @notional_amount.setter def notional_amount(self, value): self._set_parameter("notionalAmount", value) @property def notional_ccy(self): """ The ISO code of the notional currency. Should be explicitly specified if InstrumentCode hasn't been specified. May be retrieved from reference data. 
:return: str """ return self._get_parameter("notionalCcy") @notional_ccy.setter def notional_ccy(self, value): self._set_parameter("notionalCcy", value) @property def start_date(self): """ The date the term deposit starts accruing interest. Its effective date. Optional. By default it is derived from the ValuationDate and the day to spot convention of the contract currency. :return: str """ return self._get_parameter("startDate") @start_date.setter def start_date(self, value): self._set_parameter("startDate", value) @property def tenor(self): """ The period code that represents the time between the start date and end date of the contract. Mandatory if instrumentCode is null. Either the endDate or the tenor must be provided. Sample value: 1M :return: str """ return self._get_parameter("tenor") @tenor.setter def tenor(self, value): self._set_parameter("tenor", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/contracts/term_deposit/_term_deposit_definition.py
0.922587
0.396127
_term_deposit_definition.py
pypi
__all__ = ["FxPoint"] from ...instrument._definition import ObjectDefinition from ...enum_types.status import Status class FxPoint(ObjectDefinition): def __init__( self, bid=None, ask=None, mid=None, status=None, instrument=None, processing_information=None, spot_decimals=None, ): super().__init__() self.bid = bid self.ask = ask self.mid = mid self.status = status self.instrument = instrument self.processing_information = processing_information self.spot_decimals = spot_decimals @property def status(self): """ :return: enum Status """ return self._get_enum_parameter(Status, "status") @status.setter def status(self, value): self._set_enum_parameter(Status, "status", value) @property def ask(self): """ :return: float """ return self._get_parameter("ask") @ask.setter def ask(self, value): self._set_parameter("ask", value) @property def bid(self): """ :return: float """ return self._get_parameter("bid") @bid.setter def bid(self, value): self._set_parameter("bid", value) @property def instrument(self): """ :return: str """ return self._get_parameter("instrument") @instrument.setter def instrument(self, value): self._set_parameter("instrument", value) @property def mid(self): """ :return: float """ return self._get_parameter("mid") @mid.setter def mid(self, value): self._set_parameter("mid", value) @property def processing_information(self): """ :return: str """ return self._get_parameter("processingInformation") @processing_information.setter def processing_information(self, value): self._set_parameter("processingInformation", value) @property def spot_decimals(self): """ :return: str """ return self._get_parameter("spotDecimals") @spot_decimals.setter def spot_decimals(self, value): self._set_parameter("spotDecimals", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/contracts/option/_fx_point.py
0.903422
0.164953
_fx_point.py
pypi
__all__ = ["EtiFixingInfo"] from ._abstracted_class import FixingInfo from ...enum_types.fixing_frequency import FixingFrequency from ...enum_types.average_type import AverageType class EtiFixingInfo(FixingInfo): def __init__( self, average_so_far=None, average_type=None, fixing_frequency=None, fixing_calendar=None, fixing_end_date=None, fixing_start_date=None, include_holidays=None, include_week_ends=None, ): super().__init__() self.average_so_far = average_so_far self.average_type = average_type self.fixing_frequency = fixing_frequency self.fixing_calendar = fixing_calendar self.fixing_end_date = fixing_end_date self.fixing_start_date = fixing_start_date self.include_holidays = include_holidays self.include_week_ends = include_week_ends @property def average_so_far(self): """ The value of the AverageType :return: float """ return self._get_parameter("averageSoFar") @average_so_far.setter def average_so_far(self, value): self._set_parameter("averageSoFar", value) @property def average_type(self): """ The type of average used to compute. Possible values: - ArithmeticRate - ArithmeticStrike - GeometricRate - GeometricStrike :return: enum AverageType """ return self._get_enum_parameter(AverageType, "averageType") @average_type.setter def average_type(self, value): self._set_enum_parameter(AverageType, "averageType", value) @property def fixing_frequency(self): """ The fixing's frequency. Possible values: - Daily - Weekly - BiWeekly - Monthly - Quaterly - SemiAnnual - Annual :return: enum FixingFrequency """ return self._get_enum_parameter(FixingFrequency, "fixingFrequency") @fixing_frequency.setter def fixing_frequency(self, value): self._set_enum_parameter(FixingFrequency, "fixingFrequency", value) @property def fixing_calendar(self): """ The calendar of the underlying's currency. 
:return: str """ return self._get_parameter("fixingCalendar") @fixing_calendar.setter def fixing_calendar(self, value): self._set_parameter("fixingCalendar", value) @property def fixing_end_date(self): """ The end date of the fixing period. Should be less or equal to the expiry. :return: str """ return self._get_parameter("fixingEndDate") @fixing_end_date.setter def fixing_end_date(self, value): self._set_parameter("fixingEndDate", value) @property def fixing_start_date(self): """ The beginning date of the fixing period. :return: str """ return self._get_parameter("fixingStartDate") @fixing_start_date.setter def fixing_start_date(self, value): self._set_parameter("fixingStartDate", value) @property def include_holidays(self): """ Include the holidays in the list of fixings :return: bool """ return self._get_parameter("includeHolidays") @include_holidays.setter def include_holidays(self, value): self._set_parameter("includeHolidays", value) @property def include_week_ends(self): """ Include the week-ends in the list of fixings :return: bool """ return self._get_parameter("includeWeekEnds") @include_week_ends.setter def include_week_ends(self, value): self._set_parameter("includeWeekEnds", value)
/refinitiv-dataplatform-1.0.0a15.tar.gz/refinitiv-dataplatform-1.0.0a15/refinitiv/dataplatform/content/ipa/contracts/option/_eti_fixing_info.py
0.924133
0.215516
_eti_fixing_info.py
pypi