Upload 29 files
Browse files- openbb_platform/providers/tmx/openbb_tmx/__init__.py +71 -0
- openbb_platform/providers/tmx/openbb_tmx/models/__init__.py +1 -0
- openbb_platform/providers/tmx/openbb_tmx/models/available_indices.py +134 -0
- openbb_platform/providers/tmx/openbb_tmx/models/bond_prices.py +198 -0
- openbb_platform/providers/tmx/openbb_tmx/models/calendar_earnings.py +147 -0
- openbb_platform/providers/tmx/openbb_tmx/models/company_filings.py +184 -0
- openbb_platform/providers/tmx/openbb_tmx/models/company_news.py +136 -0
- openbb_platform/providers/tmx/openbb_tmx/models/equity_historical.py +248 -0
- openbb_platform/providers/tmx/openbb_tmx/models/equity_profile.py +167 -0
- openbb_platform/providers/tmx/openbb_tmx/models/equity_quote.py +382 -0
- openbb_platform/providers/tmx/openbb_tmx/models/equity_search.py +72 -0
- openbb_platform/providers/tmx/openbb_tmx/models/etf_countries.py +114 -0
- openbb_platform/providers/tmx/openbb_tmx/models/etf_holdings.py +150 -0
- openbb_platform/providers/tmx/openbb_tmx/models/etf_info.py +239 -0
- openbb_platform/providers/tmx/openbb_tmx/models/etf_search.py +270 -0
- openbb_platform/providers/tmx/openbb_tmx/models/etf_sectors.py +95 -0
- openbb_platform/providers/tmx/openbb_tmx/models/gainers.py +174 -0
- openbb_platform/providers/tmx/openbb_tmx/models/historical_dividends.py +111 -0
- openbb_platform/providers/tmx/openbb_tmx/models/index_constituents.py +98 -0
- openbb_platform/providers/tmx/openbb_tmx/models/index_sectors.py +92 -0
- openbb_platform/providers/tmx/openbb_tmx/models/index_snapshots.py +288 -0
- openbb_platform/providers/tmx/openbb_tmx/models/insider_trading.py +173 -0
- openbb_platform/providers/tmx/openbb_tmx/models/options_chains.py +115 -0
- openbb_platform/providers/tmx/openbb_tmx/models/price_target_consensus.py +172 -0
- openbb_platform/providers/tmx/openbb_tmx/models/treasury_prices.py +161 -0
- openbb_platform/providers/tmx/openbb_tmx/py.typed +0 -0
- openbb_platform/providers/tmx/openbb_tmx/utils/__init__.py +1 -0
- openbb_platform/providers/tmx/openbb_tmx/utils/gql.py +513 -0
- openbb_platform/providers/tmx/openbb_tmx/utils/helpers.py +1147 -0
openbb_platform/providers/tmx/openbb_tmx/__init__.py
ADDED
|
@@ -0,0 +1,71 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
"""TMX Provider Module."""

from openbb_core.provider.abstract.provider import Provider
from openbb_tmx.models.available_indices import TmxAvailableIndicesFetcher
from openbb_tmx.models.bond_prices import TmxBondPricesFetcher
from openbb_tmx.models.calendar_earnings import TmxCalendarEarningsFetcher
from openbb_tmx.models.company_filings import TmxCompanyFilingsFetcher
from openbb_tmx.models.company_news import TmxCompanyNewsFetcher
from openbb_tmx.models.equity_historical import TmxEquityHistoricalFetcher
from openbb_tmx.models.equity_profile import TmxEquityProfileFetcher
from openbb_tmx.models.equity_quote import TmxEquityQuoteFetcher
from openbb_tmx.models.equity_search import TmxEquitySearchFetcher
from openbb_tmx.models.etf_countries import TmxEtfCountriesFetcher
from openbb_tmx.models.etf_holdings import TmxEtfHoldingsFetcher
from openbb_tmx.models.etf_info import TmxEtfInfoFetcher
from openbb_tmx.models.etf_search import TmxEtfSearchFetcher
from openbb_tmx.models.etf_sectors import TmxEtfSectorsFetcher
from openbb_tmx.models.gainers import TmxGainersFetcher
from openbb_tmx.models.historical_dividends import TmxHistoricalDividendsFetcher
from openbb_tmx.models.index_constituents import TmxIndexConstituentsFetcher
from openbb_tmx.models.index_sectors import TmxIndexSectorsFetcher
from openbb_tmx.models.index_snapshots import TmxIndexSnapshotsFetcher
from openbb_tmx.models.insider_trading import TmxInsiderTradingFetcher
from openbb_tmx.models.options_chains import TmxOptionsChainsFetcher
from openbb_tmx.models.price_target_consensus import TmxPriceTargetConsensusFetcher
from openbb_tmx.models.treasury_prices import TmxTreasuryPricesFetcher

# Provider registration: maps standard-model names to the TMX fetcher classes
# above so the OpenBB Platform router can discover and dispatch to them.
tmx_provider = Provider(
    name="tmx",
    website="https://www.tmx.com",
    description="""Unofficial TMX Data Provider Extension
    TMX Group Companies
    - Toronto Stock Exchange
    - TSX Venture Exchange
    - TSX Trust
    - Montréal Exchange
    - TSX Alpha Exchange
    - Shorcan
    - CDCC
    - CDS
    - TMX Datalinx
    - Trayport
    """,
    fetcher_dict={
        "AvailableIndices": TmxAvailableIndicesFetcher,
        "BondPrices": TmxBondPricesFetcher,
        "CalendarEarnings": TmxCalendarEarningsFetcher,
        "CompanyFilings": TmxCompanyFilingsFetcher,
        "CompanyNews": TmxCompanyNewsFetcher,
        "EquityHistorical": TmxEquityHistoricalFetcher,
        "EquityInfo": TmxEquityProfileFetcher,
        "EquityQuote": TmxEquityQuoteFetcher,
        "EquitySearch": TmxEquitySearchFetcher,
        "EtfSearch": TmxEtfSearchFetcher,
        "EtfHoldings": TmxEtfHoldingsFetcher,
        "EtfSectors": TmxEtfSectorsFetcher,
        "EtfCountries": TmxEtfCountriesFetcher,
        # NOTE: the equity historical fetcher is intentionally reused for
        # "EtfHistorical" — both map to the same TmxEquityHistoricalFetcher.
        "EtfHistorical": TmxEquityHistoricalFetcher,
        "EtfInfo": TmxEtfInfoFetcher,
        "EquityGainers": TmxGainersFetcher,
        "HistoricalDividends": TmxHistoricalDividendsFetcher,
        "IndexConstituents": TmxIndexConstituentsFetcher,
        "IndexSectors": TmxIndexSectorsFetcher,
        "IndexSnapshots": TmxIndexSnapshotsFetcher,
        "InsiderTrading": TmxInsiderTradingFetcher,
        "OptionsChains": TmxOptionsChainsFetcher,
        "PriceTargetConsensus": TmxPriceTargetConsensusFetcher,
        "TreasuryPrices": TmxTreasuryPricesFetcher,
    },
    repr_name="TMX",
)
openbb_platform/providers/tmx/openbb_tmx/models/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
"""TMX Provider Models."""
|
openbb_platform/providers/tmx/openbb_tmx/models/available_indices.py
ADDED
|
@@ -0,0 +1,134 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""Available Indices fetcher for TMX"""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from typing import Any, Dict, List, Optional
|
| 6 |
+
|
| 7 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 8 |
+
from openbb_core.provider.standard_models.available_indices import (
|
| 9 |
+
AvailableIndicesData,
|
| 10 |
+
AvailableIndicesQueryParams,
|
| 11 |
+
)
|
| 12 |
+
from openbb_core.provider.utils.errors import EmptyDataError
|
| 13 |
+
from pydantic import Field
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class TmxAvailableIndicesQueryParams(AvailableIndicesQueryParams):
    """TMX Available Indices Query Params."""

    use_cache: bool = Field(
        default=True,
        description=(
            "Whether to use a cached request."
            " Index data is from a single JSON file, updated each day after close."
            " It is cached for one day. To bypass, set to False."
        ),
    )
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class TmxAvailableIndicesData(AvailableIndicesData):
    """TMX Available Indices Data."""

    # Narrows the standard model: every TMX index record carries a ticker,
    # so the field is required here rather than optional.
    symbol: str = Field(description="The ticker symbol of the index.")
| 31 |
+
|
| 32 |
+
|
| 33 |
+
class TmxAvailableIndicesFetcher(
    Fetcher[
        TmxAvailableIndicesQueryParams,
        List[TmxAvailableIndicesData],
    ]
):
    """Transform the query, extract and transform the data from the TMX endpoints."""

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxAvailableIndicesQueryParams:
        """Transform the query params."""
        return TmxAvailableIndicesQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxAvailableIndicesQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> Dict:
        """Return the raw data from the TMX endpoint."""
        # pylint: disable=import-outside-toplevel
        from openbb_tmx.utils.helpers import get_data_from_url, get_indices_backend

        url = "https://tmxinfoservices.com/files/indices/sptsx-indices.json"

        data = await get_data_from_url(
            url,
            use_cache=query.use_cache,
            backend=get_indices_backend(),
        )

        return data

    @staticmethod
    def transform_data(
        query: TmxAvailableIndicesQueryParams,
        data: Dict,
        **kwargs: Any,
    ) -> List[TmxAvailableIndicesData]:
        """Transform the data to the standard format.

        Raises
        ------
        EmptyDataError
            If the downloaded JSON payload is empty.
        """
        # pylint: disable=import-outside-toplevel
        import re

        data = data.copy()
        if data == {}:
            raise EmptyDataError

        # Map each symbol to its group membership. A symbol may belong to
        # several groups; join them into one comma-separated string.
        # BUGFIX: the previous code called .append() on a str, raising
        # AttributeError for any symbol present in more than one group.
        symbols: Dict[str, str] = {}
        for category, symbol_list in data["groups"].items():
            for symbol in symbol_list:
                if symbol not in symbols:
                    symbols[symbol] = category
                else:
                    symbols[symbol] = f"{symbols[symbol]},{category}"

        # Combine each index record with its category membership.
        new_data: List[Dict] = []
        for symbol, record in data["indices"].items():
            overview = record.get("overview_en", None)
            if overview:
                # Remove HTML tags from the overview.
                overview = re.sub("<.*?>", "", overview)
                # Remove additional artifacts from the overview.
                overview = re.sub("\r|\n|amp;", "", overview)
            new_data.append(
                {
                    "symbol": symbol,
                    "name": record.get("name_en", None),
                    "currency": "USD" if "(USD)" in record["name_en"] else "CAD",
                    # .get() guards against symbols absent from "groups"
                    # (the old direct lookup would raise KeyError).
                    "category": symbols.get(symbol),
                    "market_value": (
                        record["quotedmarketvalue"].get("total", None)
                        if record.get("quotedmarketvalue")
                        else None
                    ),
                    "num_constituents": record.get("nb_constituents", None),
                    # The old check looked at the non-existent "overview" key;
                    # test the cleaned text itself instead.
                    "overview": overview if overview else None,
                    "methodology": (
                        record.get("methodology", None)
                        if record.get("methodology") != ""
                        else None
                    ),
                    "factsheet": (
                        record.get("factsheet", None)
                        if record.get("factsheet") != ""
                        else None
                    ),
                }
            )

        return [TmxAvailableIndicesData.model_validate(d) for d in new_data]
openbb_platform/providers/tmx/openbb_tmx/models/bond_prices.py
ADDED
|
@@ -0,0 +1,198 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX Bond Prices Fetcher"""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
from datetime import (
|
| 5 |
+
date as dateType,
|
| 6 |
+
datetime,
|
| 7 |
+
)
|
| 8 |
+
from typing import TYPE_CHECKING, Any, Dict, List, Optional
|
| 9 |
+
|
| 10 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 11 |
+
from openbb_core.provider.standard_models.bond_reference import (
|
| 12 |
+
BondReferenceData,
|
| 13 |
+
BondReferenceQueryParams,
|
| 14 |
+
)
|
| 15 |
+
from pydantic import Field, field_validator
|
| 16 |
+
|
| 17 |
+
if TYPE_CHECKING:
|
| 18 |
+
from pandas import DataFrame
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class TmxBondPricesQueryParams(BondReferenceQueryParams):
    """TMX Bond Prices Query Params.

    Data will be made available by 5:00 EST on T+1

    Source: https://bondtradedata.iiroc.ca/#/
    """

    __alias_dict__ = {}

    issue_date_min: Optional[dateType] = Field(
        default=None,
        description="Filter by the minimum original issue date.",
    )
    issue_date_max: Optional[dateType] = Field(
        default=None,
        description="Filter by the maximum original issue date.",
    )
    last_traded_min: Optional[dateType] = Field(
        default=None,
        description="Filter by the minimum last trade date.",
    )
    use_cache: bool = Field(
        default=True,
        description=(
            "All bond data is sourced from a single JSON file that is updated daily."
            " The file is cached for one day to eliminate downloading more than once."
            " Caching will significantly speed up subsequent queries. To bypass, set to False."
        ),
    )
| 51 |
+
|
| 52 |
+
|
| 53 |
+
class TmxBondPricesData(BondReferenceData):
    """TMX Bond Prices Data."""

    # Maps standard-model field names to keys in the source JSON records.
    __alias_dict__ = {
        "coupon_rate": "couponRate",
        "ytm": "lastYield",
        "price": "lastPrice",
        "highest_price": "highestPrice",
        "lowest_price": "lowestPrice",
        "total_trades": "totalTrades",
        "last_traded_date": "lastTradedDate",
        "maturity_date": "maturityDate",
        "issue_date": "originalIssueDate",
        "issuer_name": "issuer",
    }

    ytm: Optional[float] = Field(
        default=None,
        description="Yield to maturity (YTM) is the rate of return anticipated on a bond"
        + " if it is held until the maturity date. It takes into account"
        + " the current market price, par value, coupon rate and time to maturity. It is assumed that all"
        + " coupons are reinvested at the same rate."
        + " Values are returned as a normalized percent.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    price: Optional[float] = Field(
        default=None,
        description="The last price for the bond.",
        json_schema_extra={"x-unit_measurement": "currency"},
    )
    highest_price: Optional[float] = Field(
        default=None,
        description="The highest price for the bond on the last traded date.",
        json_schema_extra={"x-unit_measurement": "currency"},
    )
    lowest_price: Optional[float] = Field(
        default=None,
        description="The lowest price for the bond on the last traded date.",
        json_schema_extra={"x-unit_measurement": "currency"},
    )
    total_trades: Optional[int] = Field(
        default=None,
        description="Total number of trades on the last traded date.",
    )
    last_traded_date: Optional[dateType] = Field(
        default=None,
        description="Last traded date of the bond.",
    )
    maturity_date: Optional[dateType] = Field(
        default=None,
        description="Maturity date of the bond.",
    )
    issue_date: Optional[dateType] = Field(
        default=None,
        description="Issue date of the bond. This is the date when the bond first accrues interest.",
    )
    issuer_name: Optional[str] = Field(
        default=None,
        description="Name of the issuing entity.",
    )

    @field_validator(
        "ytm",
        "coupon_rate",
        mode="before",
        check_fields=False,
    )
    @classmethod
    def normalize_percent(cls, v):
        """Return percents as normalized percentage points."""
        # Source reports whole percents; convert to fractions (5.0 -> 0.05).
        # NOTE(review): `if v else None` also maps a literal 0 to None —
        # confirm a 0% coupon should be treated as missing.
        return float(v) / 100 if v else None
| 124 |
+
|
| 125 |
+
|
| 126 |
+
class TmxBondPricesFetcher(
    Fetcher[
        TmxBondPricesQueryParams,
        List[TmxBondPricesData],
    ]
):
    """Tmx Bond Reference Fetcher."""

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxBondPricesQueryParams:
        """Transform query params.

        When `maturity_date_min` is not supplied it defaults to the day before
        the last business day, so already-matured issues are excluded.
        """
        # pylint: disable=import-outside-toplevel
        from datetime import timedelta

        transformed_params = params.copy()
        now = datetime.now()
        # On weekends, roll back to the preceding Friday.
        if now.date().weekday() > 4:
            now = now - timedelta(now.date().weekday() - 4)
        if transformed_params.get("maturity_date_min") is None:
            transformed_params["maturity_date_min"] = (
                now - timedelta(days=1)
            ).strftime("%Y-%m-%d")
        return TmxBondPricesQueryParams(**transformed_params)

    @staticmethod
    async def aextract_data(
        query: TmxBondPricesQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> "DataFrame":
        """Get the raw data containing all bond data."""
        # pylint: disable=import-outside-toplevel
        from openbb_tmx.utils.helpers import get_all_bonds

        bonds = await get_all_bonds(use_cache=query.use_cache)
        return bonds

    @staticmethod
    def transform_data(
        query: TmxBondPricesQueryParams,
        data: "DataFrame",
        **kwargs: Any,
    ) -> List[TmxBondPricesData]:
        """Filter the full bond table down to the query and validate.

        A redundant boolean-mask pre-filter on bondType was removed — its
        result was immediately overwritten by the equivalent `.query()` call.
        """
        results: List[Dict] = []
        # Corporate bonds only, maturing on/after the requested minimum.
        bonds = data.query(
            "bondType == 'Corp'"
            "& maturityDate >= @query.maturity_date_min.strftime('%Y-%m-%d')"
        ).sort_values(by=["maturityDate"])
        # Column (not attribute) assignment avoids pandas attribute/column
        # ambiguity and SettingWithCopy surprises.
        bonds["issuer"] = bonds["issuer"].str.strip()
        if query.maturity_date_max:
            bonds = bonds.query(
                "maturityDate <= @query.maturity_date_max.strftime('%Y-%m-%d')"
            )
        if query.last_traded_min:
            bonds = bonds.query(
                "lastTradedDate >= @query.last_traded_min.strftime('%Y-%m-%d')"
            )
        if query.coupon_min:
            bonds = bonds.query("couponRate >= @query.coupon_min")
        if query.coupon_max:
            bonds = bonds.query("couponRate <= @query.coupon_max")
        if query.issuer_name:
            bonds = bonds.query("issuer.str.contains(@query.issuer_name, case=False)")
        if len(bonds) > 0:
            bonds = bonds.drop(columns=["bondType", "securityId", "secKey"])
            # NaN -> "N/A" -> None round-trip yields Python None for pydantic.
            # NOTE(review): in some pandas versions `.replace(x, None)` pads
            # instead of setting None — verify against the pinned version.
            bonds = bonds.fillna("N/A").replace("N/A", None)
            results = bonds.to_dict("records")

        return [TmxBondPricesData.model_validate(d) for d in results]
openbb_platform/providers/tmx/openbb_tmx/models/calendar_earnings.py
ADDED
|
@@ -0,0 +1,147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX Earnings Calendar Model."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from datetime import datetime
|
| 6 |
+
from typing import Any, Dict, List, Optional
|
| 7 |
+
|
| 8 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 9 |
+
from openbb_core.provider.standard_models.calendar_earnings import (
|
| 10 |
+
CalendarEarningsData,
|
| 11 |
+
CalendarEarningsQueryParams,
|
| 12 |
+
)
|
| 13 |
+
from pydantic import Field, field_validator
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class TmxCalendarEarningsQueryParams(CalendarEarningsQueryParams):
    """TMX Calendar Earnings Query."""

    # No TMX-specific parameters; the date-range fields are inherited from
    # the standard CalendarEarningsQueryParams model.
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class TmxCalendarEarningsData(CalendarEarningsData):
    """TMX Calendar Earnings Data."""

    # Maps standard-model field names to keys in the TMX GraphQL response.
    __alias_dict__ = {
        "eps_actual": "actualEps",
        "reporting_time": "announceTime",
        "eps_consensus": "estimatedEps",
        "eps_surprise": "epsSurpriseDollar",
        "surprise_percent": "epsSurprisePercent",
        "name": "companyName",
    }

    name: str = Field(description="The company's name.")
    eps_consensus: Optional[float] = Field(
        default=None, description="The consensus estimated EPS in dollars."
    )
    eps_actual: Optional[float] = Field(
        default=None, description="The actual EPS in dollars."
    )
    eps_surprise: Optional[float] = Field(
        default=None, description="The EPS surprise in dollars."
    )
    surprise_percent: Optional[float] = Field(
        default=None,
        description="The EPS surprise as a normalized percent.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    reporting_time: Optional[str] = Field(
        default=None,
        description="The time of the report - i.e., before or after market.",
    )

    @field_validator("surprise_percent", mode="before", check_fields=False)
    @classmethod
    def percent_validate(cls, v):  # pylint: disable=E0213
        """Return the percent as a normalized value."""
        # Source reports whole percents; convert to fractions (5.0 -> 0.05).
        # NOTE(review): `if v else None` also maps a literal 0 to None.
        return float(v) / 100 if v else None
| 57 |
+
|
| 58 |
+
|
| 59 |
+
class TmxCalendarEarningsFetcher(
    Fetcher[TmxCalendarEarningsQueryParams, List[TmxCalendarEarningsData]]
):
    """Transform the query, extract and transform the data from the TMX endpoints."""

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxCalendarEarningsQueryParams:
        """Transform the query.

        Defaults: start_date -> today; end_date -> today + 5 days.
        """
        # pylint: disable=import-outside-toplevel
        from datetime import timedelta

        transformed_params = params.copy()
        if transformed_params.get("start_date") is None:
            transformed_params["start_date"] = (
                datetime.now().date().strftime("%Y-%m-%d")
            )
        if transformed_params.get("end_date") is None:
            transformed_params["end_date"] = (
                (datetime.now() + timedelta(days=5)).date().strftime("%Y-%m-%d")
            )
        return TmxCalendarEarningsQueryParams(**transformed_params)

    @staticmethod
    async def aextract_data(
        query: TmxCalendarEarningsQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> List[Dict]:
        """Return the raw data from the TMX endpoint.

        Issues one GraphQL request per weekday in [start_date, end_date],
        concurrently, and returns the combined records sorted by report date.
        """
        # pylint: disable=import-outside-toplevel
        import asyncio  # noqa
        import json  # noqa
        from openbb_tmx.utils import gql  # noqa
        from openbb_tmx.utils.helpers import get_data_from_gql, get_random_agent  # noqa
        from pandas import date_range  # noqa

        results: List[Dict] = []
        # One User-Agent shared across all requests in this batch.
        user_agent = get_random_agent()
        dates = date_range(query.start_date, end=query.end_date)

        async def create_task(date, results):
            """Create a task for a single date in the range."""
            data = []
            date = date.strftime("%Y-%m-%d")
            # NOTE(review): .copy() is shallow, so "variables" is shared by
            # all tasks; safe here only because there is no await between the
            # assignment below and json.dumps — confirm if this is refactored.
            payload = gql.get_earnings_date_payload.copy()
            payload["variables"]["date"] = date
            url = "https://app-money.tmx.com/graphql"
            r = await get_data_from_gql(
                method="POST",
                url=url,
                data=json.dumps(payload),
                headers={
                    "Host": "app-money.tmx.com",
                    "Referer": "https://money.tmx.com/",
                    "locale": "en",
                    "Content-Type": "application/json",
                    "User-Agent": user_agent,
                    "Accept": "*/*",
                },
                timeout=3,
            )
            try:
                if (
                    "data" in r
                    and r["data"].get("getEnhancedEarningsForDate") is not None
                ):
                    data = r["data"].get("getEnhancedEarningsForDate")
                    # Tag each record with the calendar date it was queried for.
                    data = [{"report_date": date, **d} for d in data]
            except Exception as e:
                raise RuntimeError(e) from e
            if len(data) > 0:
                # Extends the shared list; tasks run on one event loop thread.
                results.extend(data)
            return results

        # Skip weekends — no earnings dates are queried for weekday() >= 5.
        tasks = [create_task(date, results) for date in dates if date.weekday() < 5]

        await asyncio.gather(*tasks)

        return sorted(results, key=lambda x: x["report_date"])

    @staticmethod
    def transform_data(
        query: TmxCalendarEarningsQueryParams,
        data: List[Dict],
        **kwargs: Any,
    ) -> List[TmxCalendarEarningsData]:
        """Return the transformed data."""
        # Treat the source's "N/A" sentinel as missing before validation.
        results = [{k: (None if v == "N/A" else v) for k, v in d.items()} for d in data]
        return [TmxCalendarEarningsData.model_validate(d) for d in results]
openbb_platform/providers/tmx/openbb_tmx/models/company_filings.py
ADDED
|
@@ -0,0 +1,184 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX Company Filings Model."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from datetime import (
|
| 6 |
+
date as dateType,
|
| 7 |
+
datetime,
|
| 8 |
+
)
|
| 9 |
+
from typing import Any, Dict, List, Optional
|
| 10 |
+
|
| 11 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 12 |
+
from openbb_core.provider.standard_models.company_filings import (
|
| 13 |
+
CompanyFilingsData,
|
| 14 |
+
CompanyFilingsQueryParams,
|
| 15 |
+
)
|
| 16 |
+
from pydantic import Field, field_validator
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class TmxCompanyFilingsQueryParams(CompanyFilingsQueryParams):
    """TMX Company Filings Query Parameters."""

    start_date: Optional[dateType] = Field(
        default=None,
        description="The start date to fetch.",
    )
    end_date: Optional[dateType] = Field(
        default=None,
        description="The end date to fetch.",
    )

    @field_validator("symbol", mode="before", check_fields=False)
    @classmethod
    def _validate_symbol(cls, v: str):
        """Reject requests made without a symbol."""
        if v:
            return v
        raise ValueError("Symbol is required for TMX.")
| 38 |
+
|
| 39 |
+
|
| 40 |
+
class TmxCompanyFilingsData(CompanyFilingsData):
    """TMX Sedar Filings Data."""

    # Maps standard-model field names to keys in the TMX GraphQL response.
    __alias_dict__ = {
        "filing_date": "filingDate",
        "report_type": "name",
        "report_url": "urlToPdf",
    }

    # Description of the filing, as provided by the source.
    description: str = Field(description="The description of the filing.")
    # File size of the PDF document; None when not reported by the source.
    size: Optional[str] = Field(
        description="The file size of the PDF document.", default=None
    )
| 53 |
+
|
| 54 |
+
|
| 55 |
+
class TmxCompanyFilingsFetcher(
    Fetcher[TmxCompanyFilingsQueryParams, List[TmxCompanyFilingsData]]
):
    """TMX Company Filings Fetcher.

    Splits the requested date range into one-week windows and queries the
    TMX GraphQL endpoint concurrently, one request per window.
    """

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxCompanyFilingsQueryParams:
        """Transform the query.

        Fills in a default 16-week look-back window when dates are omitted
        and normalizes the symbol to TMX notation.
        """
        # pylint: disable=import-outside-toplevel
        from datetime import timedelta

        transformed_params = params.copy()
        if transformed_params.get("start_date") is None:
            transformed_params["start_date"] = (
                datetime.now() - timedelta(weeks=16)
            ).strftime("%Y-%m-%d")
        if transformed_params.get("end_date") is None:
            transformed_params["end_date"] = datetime.now().date().strftime("%Y-%m-%d")
        # TMX uses "." for share classes and expects no exchange suffix.
        transformed_params["symbol"] = (
            params.get("symbol", "")
            .upper()
            .replace("-", ".")
            .replace(".TO", "")
            .replace(".TSX", "")
        )
        return TmxCompanyFilingsQueryParams(**transformed_params)

    @staticmethod
    async def aextract_data(
        query: TmxCompanyFilingsQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> List[Dict]:
        """Return the raw data from the TMX endpoint."""
        # pylint: disable=import-outside-toplevel
        import asyncio  # noqa
        import json  # noqa
        from copy import deepcopy  # noqa
        from dateutil import rrule  # noqa
        from datetime import timedelta  # noqa
        from openbb_tmx.utils import gql  # noqa
        from openbb_tmx.utils.helpers import get_data_from_gql, get_random_agent  # noqa

        user_agent = get_random_agent()
        results: List[Dict] = []

        # Generate a list of dates from start_date to end_date with a frequency of 1 week.
        dates = list(
            rrule.rrule(
                rrule.WEEKLY, interval=1, dtstart=query.start_date, until=query.end_date
            )
        )

        # Add end_date to the list if it's not there already.
        if dates[-1] != query.end_date:
            dates.append(query.end_date)  # type: ignore

        # Pair consecutive dates into non-overlapping one-week windows.
        chunks = [
            (dates[i], dates[i + 1] - timedelta(days=1)) for i in range(len(dates) - 1)
        ]

        # Adjust the end date of the last chunk to be the final end date.
        chunks[-1] = (chunks[-1][0], query.end_date)  # type: ignore

        async def create_task(start, end, results):
            """Request one date window and append its filings to the shared list."""
            # Deep-copy the module-level payload template so concurrent tasks
            # do not mutate each other's request variables.
            payload = deepcopy(gql.get_company_filings_payload)
            payload["variables"]["symbol"] = query.symbol
            payload["variables"]["fromDate"] = start.strftime("%Y-%m-%d")
            payload["variables"]["toDate"] = end.strftime("%Y-%m-%d")
            payload["variables"]["limit"] = 1000
            url = "https://app-money.tmx.com/graphql"

            async def make_request():
                """POST the GraphQL payload; shared by the first attempt and retries."""
                return await get_data_from_gql(
                    method="POST",
                    url=url,
                    data=json.dumps(payload),
                    headers={
                        "authority": "app-money.tmx.com",
                        "referer": f"https://money.tmx.com/en/quote/{query.symbol}",
                        "locale": "en",
                        "Content-Type": "application/json",
                        "User-Agent": user_agent,
                        "Accept": "*/*",
                    },
                    timeout=10,
                )

            try:
                data = await make_request()
            except Exception:  # best-effort single retry on any transport error
                data = await make_request()

            # A string response indicates an error page rather than JSON; retry once.
            if isinstance(data, str):
                data = await make_request()

            if data and data.get("data", {}).get("filings"):  # type: ignore
                results.extend(data["data"]["filings"])  # type: ignore

            return results

        tasks = [create_task(chunk[0], chunk[1], results) for chunk in chunks]

        await asyncio.gather(*tasks)

        # Newest filings first.
        return sorted(results, key=lambda x: x["filingDate"], reverse=True)

    @staticmethod
    def transform_data(
        query: TmxCompanyFilingsQueryParams,
        data: List[Dict],
        **kwargs: Any,
    ) -> List[TmxCompanyFilingsData]:
        """Return the transformed data."""
        return [TmxCompanyFilingsData.model_validate(d) for d in data]
|
openbb_platform/providers/tmx/openbb_tmx/models/company_news.py
ADDED
|
@@ -0,0 +1,136 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX Stock News model."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from datetime import datetime
|
| 6 |
+
from typing import Any, Dict, List, Optional
|
| 7 |
+
|
| 8 |
+
from openbb_core.app.model.abstract.error import OpenBBError
|
| 9 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 10 |
+
from openbb_core.provider.standard_models.company_news import (
|
| 11 |
+
CompanyNewsData,
|
| 12 |
+
CompanyNewsQueryParams,
|
| 13 |
+
)
|
| 14 |
+
from pydantic import Field, field_validator
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class TmxCompanyNewsQueryParams(CompanyNewsQueryParams):
    """TMX Stock News query."""

    # Comma-separated symbols are accepted and fetched concurrently.
    __json_schema_extra__ = {"symbol": {"multiple_items_allowed": True}}

    page: Optional[int] = Field(
        default=1, description="The page number to start from. Use with limit."
    )

    @field_validator("symbol", mode="before")
    @classmethod
    def symbols_validate(cls, v):
        """Ensure a symbol was supplied; TMX cannot query news without one."""
        if v is not None:
            return v
        raise OpenBBError("Symbol is a required field for TMX.")
|
| 33 |
+
|
| 34 |
+
|
| 35 |
+
class TmxCompanyNewsData(CompanyNewsData):
    """TMX Stock News Data."""

    # Map standard-model field names to the keys in the TMX response.
    __alias_dict__ = {
        "date": "datetime",
        "title": "headline",
    }

    source: Optional[str] = Field(description="Source of the news.", default=None)

    @field_validator("date", mode="before", check_fields=False)
    @classmethod
    def date_validate(cls, v):
        """Parse the ISO-8601 timestamp and localize it to US Eastern time."""
        # pylint: disable=import-outside-toplevel
        import pytz

        parsed = datetime.strptime(v, "%Y-%m-%dT%H:%M:%S%z")
        eastern = pytz.timezone("America/New_York")
        return parsed.astimezone(eastern)
|
| 54 |
+
|
| 55 |
+
|
| 56 |
+
class TmxCompanyNewsFetcher(
    Fetcher[TmxCompanyNewsQueryParams, List[TmxCompanyNewsData]],
):
    """TMX Stock News Fetcher."""

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxCompanyNewsQueryParams:
        """Transform the query."""
        return TmxCompanyNewsQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxCompanyNewsQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> List[Dict]:
        """Return the raw data from the TMX endpoint, one request per symbol."""
        # pylint: disable=import-outside-toplevel
        import asyncio  # noqa
        import json  # noqa
        from copy import deepcopy  # noqa
        from openbb_tmx.utils import gql  # noqa
        from openbb_tmx.utils.helpers import get_data_from_gql, get_random_agent  # noqa

        user_agent = get_random_agent()
        symbols = query.symbol.split(",")  # type: ignore
        results: List[Dict] = []

        async def create_task(symbol, results):
            """Make a POST request to the TMX GraphQL endpoint for a single symbol."""
            # Normalize to TMX notation: no exchange suffix, "." share classes.
            symbol = (
                symbol.upper().replace(".TO", "").replace(".TSX", "").replace("-", ".")
            )
            # Deep-copy the shared module-level payload template so concurrent
            # tasks do not overwrite each other's request variables.
            payload = deepcopy(gql.get_company_news_events_payload)
            payload["variables"]["symbol"] = symbol
            payload["variables"]["page"] = query.page
            payload["variables"]["limit"] = query.limit
            payload["variables"]["locale"] = "en"
            url = "https://app-money.tmx.com/graphql"
            data: Dict = {}
            response = await get_data_from_gql(
                method="POST",
                url=url,
                data=json.dumps(payload),
                headers={
                    "authority": "app-money.tmx.com",
                    "referer": f"https://money.tmx.com/en/quote/{symbol}",
                    "locale": "en",
                    "Content-Type": "application/json",
                    "User-Agent": user_agent,
                    "Accept": "*/*",
                },
                timeout=3,
            )
            # The helper can return a string error page; only dict responses
            # carry a usable "data" key.
            if isinstance(response, dict) and response.get("data"):
                data = response["data"]
            if data.get("news") is not None:
                news = data["news"]
                for item in news:
                    # The newsid is only needed to build the article URL, then dropped.
                    item["url"] = (
                        f"https://money.tmx.com/quote/{symbol.upper()}/news/{item['newsid']}"
                    )
                    item.pop("newsid", None)
                    # The summary is a duplicated headline, so we drop it.
                    item.pop("summary", None)
                    # Add the symbol to the data for multi-ticker support.
                    item["symbols"] = symbol
                results.extend(news)

            return results

        tasks = [create_task(symbol, results) for symbol in symbols]

        await asyncio.gather(*tasks)

        # Newest articles first.
        return sorted(results, key=lambda d: d["datetime"], reverse=True)

    @staticmethod
    def transform_data(
        query: TmxCompanyNewsQueryParams, data: List[Dict], **kwargs: Any
    ) -> List[TmxCompanyNewsData]:
        """Return the transformed data."""
        return [TmxCompanyNewsData.model_validate(d) for d in data]
|
openbb_platform/providers/tmx/openbb_tmx/models/equity_historical.py
ADDED
|
@@ -0,0 +1,248 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX Equity Historical Model."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from datetime import (
|
| 6 |
+
date as dateType,
|
| 7 |
+
datetime,
|
| 8 |
+
)
|
| 9 |
+
from typing import Any, Dict, List, Literal, Optional, Union
|
| 10 |
+
from warnings import warn
|
| 11 |
+
|
| 12 |
+
from openbb_core.app.model.abstract.error import OpenBBError
|
| 13 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 14 |
+
from openbb_core.provider.standard_models.equity_historical import (
|
| 15 |
+
EquityHistoricalData,
|
| 16 |
+
EquityHistoricalQueryParams,
|
| 17 |
+
)
|
| 18 |
+
from openbb_core.provider.utils.descriptions import (
|
| 19 |
+
QUERY_DESCRIPTIONS,
|
| 20 |
+
)
|
| 21 |
+
from openbb_core.provider.utils.errors import EmptyDataError
|
| 22 |
+
from pydantic import Field, field_validator
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class TmxEquityHistoricalQueryParams(EquityHistoricalQueryParams):
    """
    TMX Equity Historical Query Params.

    Ticker symbols are assumed to be Canadian listings when no suffix is provided.
    ".TO" or ."TSX" are accepted but will automatically be removed.

    US tickers are supported via their composite format: "AAPL:US"

    Canadian Depositary Receipts (CDRs) are: "AAPL:AQL"

    CDRs are the underlying asset for CAD-hedged assets.

    source: https://money.tmx.com
    """

    __json_schema_extra__ = {"symbol": {"multiple_items_allowed": True}}

    interval: Union[
        Literal["1m", "2m", "5m", "15m", "30m", "60m", "1h", "1d", "1W", "1M"], str, int
    ] = Field(  # type: ignore
        description=QUERY_DESCRIPTIONS.get("interval", "")
        + " Or, any integer (entered as a string) representing the number of minutes."
        + " Default is daily data."
        + " There is no extended hours data, and intraday data is limited to after April 12 2022.",
        default="day",
    )
    adjustment: Literal["splits_only", "splits_and_dividends", "unadjusted"] = Field(
        description="The adjustment factor to apply. Only valid for daily data.",
        default="splits_only",
    )

    @field_validator("interval", mode="after", check_fields=False)
    @classmethod
    def validate_interval(cls, v):  # pylint: disable=R0911
        """Normalize the interval to "day", "week", "month", or minutes (int).

        Raises OpenBBError for any value that cannot be mapped.
        """
        if v is None or v == "day":
            return "day"
        # The field type allows a plain int of minutes; the string checks
        # below would raise TypeError on it, so pass it through first.
        if isinstance(v, int):
            return v
        if v in ("1M", "1mo", "month"):
            return "month"
        if v in ("1W", "1w", "week"):
            return "week"
        if v == "1d":
            return "day"
        # "1m".."60m" -> minutes; checked after the month/week aliases because
        # "1mo" and "month" also contain an "m".
        if "m" in v:
            return int(v.replace("m", ""))
        if "h" in v:
            return int(v.replace("h", "")) * 60
        if v.isnumeric():
            return int(v)
        raise OpenBBError(f"Invalid interval: {v}")
|
| 76 |
+
|
| 77 |
+
|
| 78 |
+
class TmxEquityHistoricalData(EquityHistoricalData):
    """TMX Equity Historical Data."""

    # Map standard-model field names to the keys in the TMX response.
    __alias_dict__ = {
        "date": "datetime",
        "open": "openPrice",
        "close": "closePrice",
        "transactions_value": "tradeValue",
        "transactions": "numberOfTrade",
        "change_percent": "changePercent",
    }

    vwap: Optional[float] = Field(
        description="Volume weighted average price for the day.", default=None
    )
    change: Optional[float] = Field(description="Change in price.", default=None)
    change_percent: Optional[float] = Field(
        description="Change in price, as a normalized percentage.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    transactions: Optional[int] = Field(
        description="Total number of transactions recorded.", default=None
    )
    transactions_value: Optional[float] = Field(
        description="Nominal value of recorded transactions.", default=None
    )

    @field_validator("date", mode="before", check_fields=False)
    @classmethod
    def date_validate(cls, v):  # pylint: disable=W0221
        """Validate the datetime format.

        Accepts datetime, date, or string input; intraday strings are
        localized to US Eastern time, midnight datetimes collapse to a date.
        """
        # pylint: disable=import-outside-toplevel
        import pytz

        if isinstance(v, datetime):
            # Keep the time component only when it is not exactly midnight.
            # (The original `and` chain dropped the time from any timestamp
            # with a zero hour/minute/second component, e.g. 09:30:00.)
            if v.hour != 0 or v.minute != 0 or v.second != 0:
                return v
            return v.date()
        if isinstance(v, dateType):
            # A plain date has no time attributes; pass it through unchanged.
            return v
        try:
            dt = datetime.strptime(v, "%Y-%m-%d %H:%M:%S%z")
            return dt.astimezone(pytz.timezone("America/New_York"))
        except ValueError:
            return datetime.strptime(v, "%Y-%m-%d")
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
class TmxEquityHistoricalFetcher(
    Fetcher[TmxEquityHistoricalQueryParams, List[TmxEquityHistoricalData]]
):
    """TMX Equity Historical Fetcher."""

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxEquityHistoricalQueryParams:
        """Transform the query."""
        # Only the daily endpoint honors the adjustment parameter; warn (rather
        # than raise) when it is combined with a non-daily interval.
        adjustment = params.get("adjustment")
        if (
            adjustment is not None
            and adjustment != "splits_only"
            and params.get("interval") not in ["day", "1d"]
        ):
            warn("Adjustment parameter is only available for daily data.")
        return TmxEquityHistoricalQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxEquityHistoricalQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> List[Dict]:
        """Return the raw data from the TMX endpoint."""
        # pylint: disable=import-outside-toplevel
        import asyncio  # noqa
        from openbb_tmx.utils.helpers import (  # noqa
            get_daily_price_history,
            get_intraday_price_history,
            get_weekly_or_monthly_price_history,
        )

        results: List[Dict] = []
        symbols = query.symbol.split(",")

        async def create_task(symbol, results):
            """Make a POST request to the TMX GraphQL endpoint for a single ticker."""
            data: List[Dict] = []
            # A different request is used for each type of interval.
            # query.interval was normalized by the validator to "day", "week",
            # "month", or an int number of minutes.
            if query.interval == "day":
                data = await get_daily_price_history(
                    symbol,
                    start_date=query.start_date,
                    end_date=query.end_date,
                    adjustment=query.adjustment,
                )
            if query.interval in ("week", "month"):
                data = await get_weekly_or_monthly_price_history(
                    symbol,
                    start_date=query.start_date,
                    end_date=query.end_date,
                    interval=query.interval,  # type: ignore
                )
            if isinstance(query.interval, int):
                data = await get_intraday_price_history(
                    symbol,
                    interval=query.interval,
                    start_date=query.start_date,
                    end_date=query.end_date,
                )

            if data != []:
                # Add the symbol to the data for multi-ticker support.
                data = [{**d, "symbol": symbol} for d in data]
                results.extend(data)

            if data == []:
                # Best-effort: an empty result is a warning, not an error, so
                # the remaining symbols still return.
                warn(f"No data found for {symbol}.")

            return results

        tasks = [create_task(symbol, results) for symbol in symbols]

        await asyncio.gather(*tasks)

        return results

    @staticmethod
    def transform_data(
        query: TmxEquityHistoricalQueryParams,
        data: List[Dict],
        **kwargs: Any,
    ) -> List[TmxEquityHistoricalData]:
        """Return the transformed data."""
        # pylint: disable=import-outside-toplevel
        from pandas import DataFrame, to_datetime

        results = DataFrame(data)
        if results.empty or len(results) == 0:
            raise EmptyDataError()

        # Handle the date formatting differences.
        results = results.rename(columns={"dateTime": "datetime"})
        if query.interval != "day":
            results["datetime"] = to_datetime(results["datetime"], utc=True)
            if query.interval in ("week", "month"):
                # Weekly/monthly rows carry no meaningful time component.
                results["datetime"] = results["datetime"].dt.strftime("%Y-%m-%d")
            else:
                # Intraday rows keep the timestamp with its UTC offset so the
                # data-model validator can localize it.
                results["datetime"] = results["datetime"].dt.strftime(
                    "%Y-%m-%d %H:%M:%S%z"
                )
        if query.interval == "day":
            results["datetime"] = to_datetime(results["datetime"]).dt.strftime(
                "%Y-%m-%d"
            )

        symbols = query.symbol.split(",")
        # If there are multiple symbols, sort the data by datetime and symbol.
        if len(symbols) > 1:
            results = results.set_index(["datetime", "symbol"]).sort_index()
            results = results.reset_index()
        # If there is only one symbol, drop the symbol column.
        if len(symbols) == 1:
            results = results.drop(columns=["symbol"])
        # Normalizes the percent change values.
        if "changePercent" in results.columns:
            results["changePercent"] = results["changePercent"].astype(float) / 100
        # For the week beginning 2011-09-12 replace the openPrice NaN with 0 because of 9/11.
        # NOTE(review): this indexes an "open" column, but the alias dict maps
        # "open" -> "openPrice" — confirm the weekly endpoint actually returns
        # a column named "open" (otherwise this would KeyError).
        if query.interval == "week":
            results["open"] = results["open"].fillna(0)
        # Convert any NaN values to None.
        results = results.fillna(value="N/A").replace("N/A", None)

        return [
            TmxEquityHistoricalData.model_validate(d)
            for d in results.to_dict("records")
        ]
|
openbb_platform/providers/tmx/openbb_tmx/models/equity_profile.py
ADDED
|
@@ -0,0 +1,167 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX Equity Profile fetcher."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from typing import Any, Dict, List, Optional
|
| 6 |
+
|
| 7 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 8 |
+
from openbb_core.provider.standard_models.equity_info import (
|
| 9 |
+
EquityInfoData,
|
| 10 |
+
EquityInfoQueryParams,
|
| 11 |
+
)
|
| 12 |
+
from pydantic import Field, model_validator
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class TmxEquityProfileQueryParams(EquityInfoQueryParams):
    """TMX Equity Profile query params."""

    # Comma-separated symbols are accepted; each is fetched in its own task.
    __json_schema_extra__ = {"symbol": {"multiple_items_allowed": True}}
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class TmxEquityProfileData(EquityInfoData):
    """TMX Equity Profile Data."""

    # Map standard-model field names to the keys in the TMX GraphQL response.
    # NOTE(review): "share_outstanding" and "total_shares_outstanding" do not
    # match the field names declared below ("shares_outstanding",
    # "shares_total") — confirm whether these aliases ever take effect.
    __alias_dict__ = {
        "short_description": "shortDescription",
        "long_description": "longDescription",
        "company_url": "website",
        "business_phone_no": "phoneNumber",
        "business_address": "fullAddress",
        "stock_exchange": "exchangeCode",
        "industry_category": "industry",
        "industry_group": "qmdescription",
        "issue_type": "issueType",
        "share_outstanding": "shareOutStanding",
        "shares_escrow": "sharesESCROW",
        "total_shares_outstanding": "totalSharesOutStanding",
    }

    email: Optional[str] = Field(description="The email of the company.", default=None)
    issue_type: Optional[str] = Field(
        description="The issuance type of the asset.",
        default=None,
    )
    shares_outstanding: Optional[int] = Field(
        description="The number of listed shares outstanding.",
        default=None,
    )
    shares_escrow: Optional[int] = Field(
        description="The number of shares held in escrow.",
        default=None,
    )
    shares_total: Optional[int] = Field(
        description="The total number of shares outstanding from all classes.",
        default=None,
    )
    dividend_frequency: Optional[str] = Field(
        description="The dividend frequency.", default=None
    )

    @model_validator(mode="before")
    @classmethod
    def validate_empty_strings(cls, values) -> Dict:
        """Coerce empty-string values from the API to None before field validation."""
        return {k: None if v == "" else v for k, v in values.items()}
|
| 65 |
+
|
| 66 |
+
|
| 67 |
+
class TmxEquityProfileFetcher(
    Fetcher[
        TmxEquityProfileQueryParams,
        List[TmxEquityProfileData],
    ]
):
    """TMX Equity Profile Fetcher."""

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxEquityProfileQueryParams:
        """Transform the query."""
        return TmxEquityProfileQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxEquityProfileQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> List[Dict]:
        """Return the raw data from the TMX endpoint, one request per symbol."""
        # pylint: disable=import-outside-toplevel
        import asyncio  # noqa
        import json  # noqa
        from copy import deepcopy  # noqa
        from openbb_tmx.utils import gql  # noqa
        from openbb_tmx.utils.helpers import get_data_from_gql, get_random_agent  # noqa

        symbols = query.symbol.split(",")

        # The list where the results will be stored and appended to.
        results: List[Dict] = []
        user_agent = get_random_agent()

        url = "https://app-money.tmx.com/graphql"

        async def create_task(symbol: str, results) -> None:
            """Make a POST request to the TMX GraphQL endpoint for a single symbol."""
            # Normalize to TMX notation: no exchange suffix, "." share classes.
            symbol = (
                symbol.upper().replace("-", ".").replace(".TO", "").replace(".TSX", "")
            )

            # A shallow .copy() would still share the nested "variables" dict
            # across concurrent tasks, so deep-copy the payload template.
            payload = deepcopy(gql.stock_info_payload)
            payload["variables"]["symbol"] = symbol

            r = await get_data_from_gql(
                method="POST",
                url=url,
                data=json.dumps(payload),
                headers={
                    "authority": "app-money.tmx.com",
                    "referer": f"https://money.tmx.com/en/quote/{symbol}",
                    "locale": "en",
                    "Content-Type": "application/json",
                    "User-Agent": user_agent,
                    "Accept": "*/*",
                },
                timeout=3,
            )
            # Guard against error responses that carry no "data" key at all
            # (indexing r["data"] directly would raise on those).
            if isinstance(r, dict) and (r.get("data") or {}).get("getQuoteBySymbol"):
                results.append(r["data"]["getQuoteBySymbol"])

        tasks = [create_task(symbol, results) for symbol in symbols]
        await asyncio.gather(*tasks)
        return results

    @staticmethod
    def transform_data(
        query: TmxEquityProfileQueryParams,
        data: List[Dict],
        **kwargs: Any,
    ) -> List[TmxEquityProfileData]:
        """Return the transformed data."""
        # Get only the items associated with `equity.profile()`.
        items_list = [
            "shortDescription",
            "longDescription",
            "website",
            "phoneNumber",
            "fullAddress",
            "sector",
            "qmdescription",
            "industry",
            "exchangeCode",
            "shareOutStanding",
            "sharesESCROW",
            "totalSharesOutStanding",
            "email",
            "issueType",
            "name",
            "symbol",
            "dividendFrequency",
            "employees",
        ]
        data = [{k: v for k, v in d.items() if k in items_list} for d in data]
        # Sort the data by the order of the symbols in the query.
        symbols = query.symbol.split(",")
        symbol_to_index = {symbol: index for index, symbol in enumerate(symbols)}
        # Symbols were normalized before the request, so a returned symbol may
        # not match the raw query string; sort unmatched entries to the end
        # instead of raising a KeyError.
        data = sorted(
            data, key=lambda d: symbol_to_index.get(d["symbol"], len(symbols))
        )

        return [TmxEquityProfileData.model_validate(d) for d in data]
|
openbb_platform/providers/tmx/openbb_tmx/models/equity_quote.py
ADDED
|
@@ -0,0 +1,382 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX Equity Profile fetcher."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from datetime import (
|
| 6 |
+
date as dateType,
|
| 7 |
+
datetime,
|
| 8 |
+
)
|
| 9 |
+
from typing import Any, Dict, List, Optional, Union
|
| 10 |
+
from warnings import warn
|
| 11 |
+
|
| 12 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 13 |
+
from openbb_core.provider.standard_models.equity_quote import (
|
| 14 |
+
EquityQuoteData,
|
| 15 |
+
EquityQuoteQueryParams,
|
| 16 |
+
)
|
| 17 |
+
from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS
|
| 18 |
+
from pydantic import Field, field_validator
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class TmxEquityQuoteQueryParams(EquityQuoteQueryParams):
    """TMX Equity Profile query params."""

    # Permit a comma-separated list in the `symbol` field; the fetcher
    # splits it and issues one GraphQL request per symbol.
    __json_schema_extra__ = {"symbol": {"multiple_items_allowed": True}}
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class TmxEquityQuoteData(EquityQuoteData):
    """TMX Equity Profile Data.

    Maps the camelCase keys of the TMX `getQuoteBySymbol` GraphQL response
    onto the standard-model snake_case fields via `__alias_dict__`.
    """

    __alias_dict__ = {
        "last_price": "price",
        "open": "openPrice",
        "high": "dayHigh",
        "low": "dayLow",
        "change": "priceChange",
        "change_percent": "percentChange",
        "prev_close": "prevClose",
        "stock_exchange": "exchangeCode",
        "industry_category": "industry",
        "industry_group": "qmdescription",
        "exchange": "exchangeCode",
        "security_type": "datatype",
        "year_high": "weeks52high",
        "year_low": "weeks52low",
        "ma_21": "day21MovingAvg",
        "ma_50": "day50MovingAvg",
        "ma_200": "day200MovingAvg",
        "volume_avg_10d": "averageVolume10D",
        "volume_avg_30d": "averageVolume30D",
        "volume_avg_50d": "averageVolume50D",
        "market_cap": "marketCap",
        "market_cap_all_classes": "MarketCapAllClasses",
        "div_amount": "dividendAmount",
        "div_currency": "dividendCurrency",
        "div_yield": "dividendYield",
        "div_freq": "dividendFrequency",
        "div_ex_date": "exDividendDate",
        "div_pay_date": "dividendPayDate",
        "div_growth_3y": "dividend3Years",
        "div_growth_5y": "dividend5Years",
        "pe": "peRatio",
        "debt_to_equity": "totalDebtToEquity",
        "price_to_book": "priceToBook",
        "price_to_cf": "priceToCashFlow",
        "return_on_equity": "returnOnEquity",
        "return_on_assets": "returnOnAssets",
        "shares_outstanding": "shareOutStanding",
        "shares_escrow": "sharesESCROW",
        "shares_total": "totalSharesOutStanding",
    }

    name: Optional[str] = Field(default=None, description="The name of the asset.")
    security_type: Optional[str] = Field(
        description="The issuance type of the asset.", default=None
    )
    exchange: Optional[str] = Field(
        default=None,
        description="The listing exchange code.",
    )
    sector: Optional[str] = Field(default=None, description="The sector of the asset.")
    industry_category: Optional[str] = Field(
        default=None,
        description="The industry category of the asset.",
    )
    industry_group: Optional[str] = Field(
        default=None,
        description="The industry group of the asset.",
    )
    last_price: Optional[float] = Field(
        default=None, description="The last price of the asset."
    )
    open: Optional[float] = Field(
        default=None,
        description=DATA_DESCRIPTIONS.get("open", ""),
    )
    high: Optional[float] = Field(
        default=None,
        description=DATA_DESCRIPTIONS.get("high", ""),
    )
    low: Optional[float] = Field(
        default=None,
        description=DATA_DESCRIPTIONS.get("low", ""),
    )
    # Fixed: previously had no description, unlike the other OHLC fields.
    close: Optional[float] = Field(
        default=None,
        description=DATA_DESCRIPTIONS.get("close", ""),
    )
    vwap: Optional[float] = Field(
        default=None, description=DATA_DESCRIPTIONS.get("vwap", "")
    )
    # Fixed: description previously looked up the "vwap" key (copy-paste bug).
    volume: Optional[int] = Field(
        default=None, description=DATA_DESCRIPTIONS.get("volume", "")
    )
    prev_close: Optional[float] = Field(
        default=None, description=DATA_DESCRIPTIONS.get("prev_close", "")
    )
    change: Optional[float] = Field(
        default=None,
        description="The change in price.",
    )
    change_percent: Optional[float] = Field(
        default=None,
        description="The change in price as a normalized percent.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    year_high: Optional[float] = Field(
        description="Fifty-two week high.",
        default=None,
    )
    year_low: Optional[float] = Field(
        description="Fifty-two week low.",
        default=None,
    )
    ma_21: Optional[float] = Field(
        description="Twenty-one day moving average.",
        default=None,
    )
    ma_50: Optional[float] = Field(
        description="Fifty day moving average.",
        default=None,
    )
    ma_200: Optional[float] = Field(
        description="Two-hundred day moving average.",
        default=None,
    )
    volume_avg_10d: Optional[int] = Field(
        description="Ten day average volume.",
        default=None,
    )
    volume_avg_30d: Optional[int] = Field(
        description="Thirty day average volume.",
        default=None,
    )
    volume_avg_50d: Optional[int] = Field(
        description="Fifty day average volume.",
        default=None,
    )
    market_cap: Optional[int] = Field(
        description="Market capitalization.",
        default=None,
    )
    market_cap_all_classes: Optional[int] = Field(
        description="Market capitalization of all share classes.",
        default=None,
    )
    div_amount: Optional[float] = Field(
        description="The most recent dividend amount.",
        default=None,
    )
    div_currency: Optional[str] = Field(
        description="The currency the dividend is paid in.",
        default=None,
    )
    div_yield: Optional[float] = Field(
        description="The dividend yield as a normalized percentage.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    div_freq: Optional[str] = Field(
        description="The frequency of dividend payments.",
        default=None,
    )
    div_ex_date: Optional[dateType] = Field(
        description="The ex-dividend date.",
        default=None,
    )
    # Fixed typo: "ayment" -> "payment".
    div_pay_date: Optional[dateType] = Field(
        description="The next dividend payment date.",
        default=None,
    )
    div_growth_3y: Optional[Union[float, str]] = Field(
        description="The three year dividend growth as a normalized percentage.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    div_growth_5y: Optional[Union[float, str]] = Field(
        description="The five year dividend growth as a normalized percentage.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    pe: Optional[Union[float, str]] = Field(
        description="The price to earnings ratio.",
        default=None,
    )
    eps: Optional[Union[float, str]] = Field(
        description="The earnings per share.", default=None
    )
    debt_to_equity: Optional[Union[float, str]] = Field(
        description="The debt to equity ratio.",
        default=None,
    )
    price_to_book: Optional[Union[float, str]] = Field(
        description="The price to book ratio.",
        default=None,
    )
    price_to_cf: Optional[Union[float, str]] = Field(
        description="The price to cash flow ratio.",
        default=None,
    )
    return_on_equity: Optional[Union[float, str]] = Field(
        description="The return on equity, as a normalized percentage.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    return_on_assets: Optional[Union[float, str]] = Field(
        description="The return on assets, as a normalized percentage.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    beta: Optional[Union[float, str]] = Field(
        description="The beta relative to the TSX Composite.", default=None
    )
    alpha: Optional[Union[float, str]] = Field(
        description="The alpha relative to the TSX Composite.", default=None
    )
    shares_outstanding: Optional[int] = Field(
        description="The number of listed shares outstanding.",
        default=None,
    )
    shares_escrow: Optional[int] = Field(
        description="The number of shares held in escrow.",
        default=None,
    )
    shares_total: Optional[int] = Field(
        description="The total number of shares outstanding from all classes.",
        default=None,
    )

    @field_validator(
        "div_ex_date",
        "div_pay_date",
        mode="before",
        check_fields=False,
    )
    @classmethod
    def date_validate(cls, v):  # pylint: disable=E0213
        """Return the date object parsed from either of the two formats TMX emits."""
        if v:
            try:
                return datetime.strptime(v, "%Y-%m-%d").date()
            except ValueError:
                # Fall back to the full timestamp format.
                return datetime.strptime(v, "%Y-%m-%d %H:%M:%S.%f").date()
        return None

    @field_validator(
        "return_on_equity",
        "return_on_assets",
        "div_yield",
        "div_growth_3y",
        "div_growth_5y",
        "change_percent",
        mode="before",
        check_fields=False,
    )
    @classmethod
    def normalize_percent(cls, v):
        """Return percents as normalized percentage points.

        NOTE(review): a literal 0 is also mapped to None here (falsy check);
        presumably TMX uses 0 for missing values -- confirm before changing.
        """
        return round(float(v) / 100, 6) if v else None
|
| 278 |
+
|
| 279 |
+
|
| 280 |
+
class TmxEquityQuoteFetcher(
    Fetcher[
        TmxEquityQuoteQueryParams,
        List[TmxEquityQuoteData],
    ]
):
    """TMX Equity Quote Fetcher.

    Fans out one GraphQL POST per requested symbol, gathers the responses
    concurrently, and returns them in the order the symbols were requested.
    """

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxEquityQuoteQueryParams:
        """Transform the query."""
        return TmxEquityQuoteQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxEquityQuoteQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> List[Dict]:
        """Return the raw data from the TMX endpoint."""
        # pylint: disable=import-outside-toplevel
        import asyncio  # noqa
        import json  # noqa
        from openbb_tmx.utils import gql  # noqa
        from openbb_tmx.utils.helpers import get_data_from_gql, get_random_agent  # noqa

        symbols = query.symbol.split(",")

        # The list where the results will be stored and appended to.
        results: List[Dict] = []
        user_agent = get_random_agent()

        url = "https://app-money.tmx.com/graphql"

        async def create_task(symbol: str, results) -> None:
            """Make a POST request to the TMX GraphQL endpoint for a single symbol."""
            # Normalize to the bare TSX form the GraphQL API expects.
            symbol = (
                symbol.upper().replace("-", ".").replace(".TO", "").replace(".TSX", "")
            )

            payload = gql.stock_info_payload.copy()
            payload["variables"]["symbol"] = symbol

            data = {}
            r = await get_data_from_gql(
                method="POST",
                url=url,
                data=json.dumps(payload),
                headers={
                    "authority": "app-money.tmx.com",
                    "referer": f"https://money.tmx.com/en/quote/{symbol}",
                    "locale": "en",
                    "Content-Type": "application/json",
                    "User-Agent": user_agent,
                    "Accept": "*/*",
                },
                timeout=3,
            )
            if r["data"].get("getQuoteBySymbol"):
                data = r["data"]["getQuoteBySymbol"]
                results.append(data)
            else:
                # Best-effort: a bad symbol should not fail the whole batch.
                warn(f"Could not get data for {symbol}.")

        tasks = [create_task(symbol, results) for symbol in symbols]
        await asyncio.gather(*tasks)
        return results

    @staticmethod
    def transform_data(
        query: TmxEquityQuoteQueryParams,
        data: List[Dict],
        **kwargs: Any,
    ) -> List[TmxEquityQuoteData]:
        """Return the transformed data."""
        # Remove the items associated with `equity.profile()`.
        items_list = [
            "shortDescription",
            "longDescription",
            "website",
            "phoneNumber",
            "fullAddress",
            "email",
            "issueType",
            "exchangeName",
            "employees",
            "exShortName",
        ]
        data = [{k: v for k, v in d.items() if k not in items_list} for d in data]
        # Replace all NaN, zero, and empty-string values with None.
        # Fixed: the previous membership test `v in (nan, 0, "")` missed NaN
        # because floats parsed from JSON are distinct objects and NaN never
        # compares equal to anything; `v != v` is the canonical NaN check.
        for d in data:
            for k, v in d.items():
                if (isinstance(v, float) and v != v) or v in (0, ""):
                    d[k] = None
        # Sort the data by the order of the symbols in the query.
        symbols = query.symbol.split(",")
        symbol_to_index = {symbol: index for index, symbol in enumerate(symbols)}
        # Fixed: unrecognized symbols (the API normalizes e.g. ".TO" suffixes)
        # now sort to the end instead of raising KeyError.
        data = sorted(
            data, key=lambda d: symbol_to_index.get(d["symbol"], len(symbols))
        )

        return [TmxEquityQuoteData.model_validate(d) for d in data]
|
openbb_platform/providers/tmx/openbb_tmx/models/equity_search.py
ADDED
|
@@ -0,0 +1,72 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX Equity Search fetcher."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from typing import Any, Dict, List, Optional
|
| 6 |
+
|
| 7 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 8 |
+
from openbb_core.provider.standard_models.equity_search import (
|
| 9 |
+
EquitySearchData,
|
| 10 |
+
EquitySearchQueryParams,
|
| 11 |
+
)
|
| 12 |
+
from pydantic import Field
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class TmxEquitySearchQueryParams(EquitySearchQueryParams):
    """TMX Equity Search query.

    Source: https://www.tmx.com/
    """

    # The company directory is fetched as one list; caching avoids
    # re-downloading it on every search.
    use_cache: bool = Field(
        default=True,
        description="Whether to use a cached request. The list of companies is cached for two days.",
    )
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class TmxEquitySearchData(EquitySearchData):
    """TMX Equity Search Data."""

    # No provider-specific fields; the standard model covers symbol and name.
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
class TmxEquitySearchFetcher(
    Fetcher[
        TmxEquitySearchQueryParams,
        List[TmxEquitySearchData],
    ]
):
    """TMX Equity Search Fetcher.

    Loads the full TMX company directory and filters it case-insensitively
    by the query string against both the symbol and the company name.
    """

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxEquitySearchQueryParams:
        """Transform the query."""
        return TmxEquitySearchQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxEquitySearchQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> List[Dict]:
        """Return the raw data from the TMX endpoint."""
        # pylint: disable=import-outside-toplevel
        from openbb_tmx.utils.helpers import get_all_tmx_companies
        from pandas import DataFrame

        companies = await get_all_tmx_companies(use_cache=query.use_cache)
        results = DataFrame(index=companies, data=companies.values(), columns=["name"])
        results = results.reset_index().rename(columns={"index": "symbol"})

        # Fixed: was `if query:` -- a pydantic model instance is always
        # truthy, so the intent (skip filtering on an empty search term)
        # requires testing the `query` field itself.
        if query.query:
            results = results[
                results["name"].str.contains(query.query, case=False)
                | results["symbol"].str.contains(query.query, case=False)
            ]

        return results.reset_index(drop=True).astype(str).to_dict("records")

    @staticmethod
    def transform_data(
        query: TmxEquitySearchQueryParams, data: List[Dict], **kwargs: Any
    ) -> List[TmxEquitySearchData]:
        """Transform the data to the standard format."""
        return [TmxEquitySearchData.model_validate(d) for d in data]
|
openbb_platform/providers/tmx/openbb_tmx/models/etf_countries.py
ADDED
|
@@ -0,0 +1,114 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX ETF Countries fetcher."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from typing import Any, Dict, List, Optional
|
| 6 |
+
from warnings import warn
|
| 7 |
+
|
| 8 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 9 |
+
from openbb_core.provider.standard_models.etf_countries import (
|
| 10 |
+
EtfCountriesData,
|
| 11 |
+
EtfCountriesQueryParams,
|
| 12 |
+
)
|
| 13 |
+
from pydantic import Field
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class TmxEtfCountriesQueryParams(EtfCountriesQueryParams):
    """TMX ETF Countries Query Params"""

    # Permit a comma-separated list of symbols in the `symbol` field.
    __json_schema_extra__ = {"symbol": {"multiple_items_allowed": True}}

    use_cache: bool = Field(
        default=True,
        description="Whether to use a cached request. All ETF data comes from a single JSON file that is updated daily."
        + " To bypass, set to False. If True, the data will be cached for 4 hours.",
    )
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class TmxEtfCountriesData(EtfCountriesData):
    """TMX ETF Countries Data."""

    # No provider-specific fields; one column per requested symbol is added
    # dynamically by the fetcher's pivot in `aextract_data`.
|
| 30 |
+
|
| 31 |
+
|
| 32 |
+
class TmxEtfCountriesFetcher(
    Fetcher[
        TmxEtfCountriesQueryParams,
        List[TmxEtfCountriesData],
    ]
):
    """TMX ETF Countries Fetcher.

    Extracts the per-country weights from the `regions` record of each
    requested ETF and pivots them into one row per country with a weight
    column per symbol.
    """

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxEtfCountriesQueryParams:
        """Transform the query."""
        return TmxEtfCountriesQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxEtfCountriesQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> List[Dict]:
        """Return the raw data from the TMX endpoint."""
        # pylint: disable=import-outside-toplevel
        from openbb_core.provider.utils.errors import EmptyDataError  # noqa
        from openbb_tmx.utils.helpers import get_all_etfs
        from pandas import DataFrame

        # Fixed: previously only the single-symbol path was uppercased, so a
        # lowercase comma-separated list silently matched nothing.
        symbols = [s.strip().upper() for s in query.symbol.split(",")]

        _data = DataFrame(await get_all_etfs(use_cache=query.use_cache))
        results = {}
        for symbol in symbols:
            data = {}
            # The directory keys symbols without the Canadian ".TO" suffix.
            if ".TO" in symbol:
                symbol = symbol.replace(".TO", "")  # noqa
            _target = _data[_data["symbol"] == symbol]["regions"]
            target = DataFrame()
            if len(_target) > 0:
                target = DataFrame.from_records(_target.iloc[0]).rename(
                    columns={"name": "country", "percent": "weight"}
                )
            if not target.empty:
                target = target.set_index("country")
                for i in target.index:
                    data.update({i: target.loc[i]["weight"]})
            if data:
                results.update({symbol: data})
            else:
                # Best-effort: a bad symbol should not fail the whole batch.
                warn(f"No data found for {symbol}")

        if not results:
            raise EmptyDataError("No countries info found for the given symbol(s).")

        # Pivot so each row is a country and each column a symbol's weight.
        output = (
            DataFrame(results)
            .transpose()
            .reset_index()
            .rename(columns={"index": "symbol"})
        ).transpose()
        output.columns = output.loc["symbol"].to_list()
        output.drop("symbol", axis=0, inplace=True)
        return (
            output.reset_index().rename(columns={"index": "country"}).to_dict("records")
        )

    @staticmethod
    def transform_data(
        query: TmxEtfCountriesQueryParams, data: List[Dict], **kwargs: Any
    ) -> List[TmxEtfCountriesData]:
        """Return the transformed data."""
        # pylint: disable=import-outside-toplevel
        from numpy import nan
        from pandas import DataFrame

        output = DataFrame(data)
        # Convert whole-number percents into normalized (0-1) weights.
        for col in output.columns.to_list():
            if col != "country":
                output[col] = output[col].astype(float) / 100
        output = output.replace({nan: None})

        return [
            TmxEtfCountriesData.model_validate(d) for d in output.to_dict("records")
        ]
|
openbb_platform/providers/tmx/openbb_tmx/models/etf_holdings.py
ADDED
|
@@ -0,0 +1,150 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX ETF Holdings fetcher."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from typing import Any, Dict, List, Optional, Union
|
| 6 |
+
|
| 7 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 8 |
+
from openbb_core.provider.standard_models.etf_holdings import (
|
| 9 |
+
EtfHoldingsData,
|
| 10 |
+
EtfHoldingsQueryParams,
|
| 11 |
+
)
|
| 12 |
+
from pydantic import Field, field_validator
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class TmxEtfHoldingsQueryParams(EtfHoldingsQueryParams):
    """TMX ETF Holdings query.

    Source: https://www.tmx.com/
    """

    use_cache: bool = Field(
        default=True,
        description="Whether to use a cached request. All ETF data comes from a single JSON file that is updated daily."
        + " To bypass, set to False. If True, the data will be cached for 4 hours.",
    )
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class TmxEtfHoldingsData(EtfHoldingsData):
    """TMX ETF Holdings Data."""

    __alias_dict__ = {
        "shares": "number_of_shares",
    }

    symbol: Optional[str] = Field(
        description="The ticker symbol of the asset.", default=None
    )
    name: Optional[str] = Field(description="The name of the asset.", default=None)
    weight: Optional[float] = Field(
        description="The weight of the asset in the portfolio, as a normalized percentage.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    # Fixed: description previously read "The value of the assets under
    # management." -- a copy-paste error; this field is the share count
    # (aliased from `number_of_shares`).
    shares: Optional[Union[int, str]] = Field(
        description="The number of shares of the holding.",
        default=None,
    )
    market_value: Optional[Union[float, str]] = Field(
        description="The market value of the holding.", default=None
    )
    currency: Optional[str] = Field(
        default=None, description="The currency of the holding."
    )
    share_percentage: Optional[float] = Field(
        description="The share percentage of the holding, as a normalized percentage.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    share_change: Optional[Union[float, str]] = Field(
        description="The change in shares of the holding.", default=None
    )
    country: Optional[str] = Field(
        description="The country of the holding.", default=None
    )
    exchange: Optional[str] = Field(
        description="The exchange code of the holding.", default=None
    )
    type_id: Optional[str] = Field(
        description="The holding type ID of the asset.", default=None
    )
    fund_id: Optional[str] = Field(
        description="The fund ID of the asset.", default=None
    )

    @field_validator("share_percentage", "weight", mode="before", check_fields=False)
    @classmethod
    def normalize_percent(cls, v):
        """Return percents as normalized percentage points."""
        return float(v) / 100 if v else None
|
| 80 |
+
|
| 81 |
+
|
| 82 |
+
class TmxEtfHoldingsFetcher(
    Fetcher[
        TmxEtfHoldingsQueryParams,
        List[TmxEtfHoldingsData],
    ]
):
    """TMX ETF Holdings Fetcher.

    Reads the `holdings_top10` record for one ETF out of the cached
    all-ETFs JSON file and normalizes its column names.
    """

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxEtfHoldingsQueryParams:
        """Transform the query."""
        # Strip Canadian exchange suffixes and convert share-class dashes to
        # the dot notation used by the TMX directory.
        params["symbol"] = (
            params["symbol"].replace(".TO", "").replace(".TSX", "").replace("-", ".")
        )
        return TmxEtfHoldingsQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxEtfHoldingsQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> List[Dict]:
        """Return the raw data from the TMX endpoint."""
        # pylint: disable=import-outside-toplevel
        from openbb_tmx.utils.helpers import get_all_etfs
        from pandas import DataFrame

        query.symbol = query.symbol.upper()
        results = []
        # Removed a dead `etf = DataFrame()` assignment that was immediately
        # overwritten below.
        etfs = DataFrame(await get_all_etfs(use_cache=query.use_cache))
        etf = etfs[etfs["symbol"] == query.symbol]

        # Only proceed on an unambiguous single match.
        if len(etf) == 1:
            top_holdings = DataFrame(etf["holdings_top10"].iloc[0])
            top_holdings = top_holdings.dropna(axis=1, how="all")
            _columns = {
                "numberofshares": "number_of_shares",
                "symbol": "symbol",
                "country": "country",
                "fundid": "fund_id",
                "excode": "exchange",
                "securityname": "name",
                "currency": "currency",
                "marketvalue": "market_value",
                "detailholdingtypeid": "type_id",
                "weighting": "weight",
                "sharepercentage": "share_percentage",
                "sharechange": "share_change",
                "shareChange": "share_change",
            }
            top_holdings.rename(columns=_columns, inplace=True)
            # The fillna -> replace round-trip converts NaN cells (and the
            # literal "NA"/"N/A" strings TMX emits) into None for pydantic.
            results = (
                top_holdings.fillna("N/A")
                .replace("NA", None)
                .replace("N/A", None)
                .to_dict("records")
            )

        return results

    @staticmethod
    def transform_data(
        query: TmxEtfHoldingsQueryParams,
        data: List[Dict],
        **kwargs: Any,
    ) -> List[TmxEtfHoldingsData]:
        """Transform the data to the standard format."""
        return [TmxEtfHoldingsData.model_validate(d) for d in data]
|
openbb_platform/providers/tmx/openbb_tmx/models/etf_info.py
ADDED
|
@@ -0,0 +1,239 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX ETF Info fetcher."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from typing import Any, Dict, List, Optional
|
| 6 |
+
|
| 7 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 8 |
+
from openbb_core.provider.standard_models.etf_info import (
|
| 9 |
+
EtfInfoData,
|
| 10 |
+
EtfInfoQueryParams,
|
| 11 |
+
)
|
| 12 |
+
from pydantic import Field, field_validator
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class TmxEtfInfoQueryParams(EtfInfoQueryParams):
    """Query parameters for the TMX ETF Info fetcher."""

    # Permit a comma-separated list of symbols in a single request.
    __json_schema_extra__ = {"symbol": {"multiple_items_allowed": True}}

    # The upstream source is one large JSON file refreshed daily, so a
    # short-lived cache avoids re-downloading it on every call.
    use_cache: bool = Field(
        default=True,
        description=(
            "Whether to use a cached request. All ETF data comes from a single JSON file that is updated daily."
            " To bypass, set to False. If True, the data will be cached for 4 hours."
        ),
    )
|
| 25 |
+
|
| 26 |
+
|
| 27 |
+
class TmxEtfInfoData(EtfInfoData):
    """TMX ETF Info Data.

    Maps the TMX ETF JSON feed onto the standard ETF info model; percent
    figures arrive as percentage points and are normalized to fractions.
    """

    # Map model field names to the keys used by the TMX JSON feed.
    __alias_dict__ = {
        "avg_volume": "volume_avg_daily",
        "issuer": "fund_family",
        "avg_volume_30d": "volume_avg_30d",
        "description": "investment_objectives",
    }

    issuer: Optional[str] = Field(description="The issuer of the ETF.", default=None)
    investment_style: Optional[str] = Field(
        description="The investment style of the ETF.", default=None
    )
    esg: Optional[bool] = Field(
        description="Whether the ETF qualifies as an ESG fund.", default=None
    )
    # BUG FIX: `currency` and `close` previously omitted `default=None`,
    # making them required fields — inconsistent with every sibling field
    # and a validation failure whenever the feed lacks either value.
    currency: Optional[str] = Field(
        description="The currency of the ETF.", default=None
    )
    unit_price: Optional[float] = Field(
        description="The unit price of the ETF.", default=None
    )
    close: Optional[float] = Field(
        description="The closing price of the ETF.", default=None
    )
    prev_close: Optional[float] = Field(
        description="The previous closing price of the ETF.", default=None
    )
    return_1m: Optional[float] = Field(
        description="The one-month return of the ETF, as a normalized percent",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    return_3m: Optional[float] = Field(
        description="The three-month return of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    return_6m: Optional[float] = Field(
        description="The six-month return of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    return_ytd: Optional[float] = Field(
        description="The year-to-date return of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    return_1y: Optional[float] = Field(
        description="The one-year return of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    return_3y: Optional[float] = Field(
        description="The three-year return of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    return_5y: Optional[float] = Field(
        description="The five-year return of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    return_10y: Optional[float] = Field(
        description="The ten-year return of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    return_from_inception: Optional[float] = Field(
        description="The return from inception of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    avg_volume: Optional[int] = Field(
        description="The average daily volume of the ETF.",
        default=None,
    )
    avg_volume_30d: Optional[int] = Field(
        description="The 30-day average volume of the ETF.",
        default=None,
    )
    aum: Optional[float] = Field(description="The AUM of the ETF.", default=None)
    pe_ratio: Optional[float] = Field(
        description="The price-to-earnings ratio of the ETF.", default=None
    )
    pb_ratio: Optional[float] = Field(
        description="The price-to-book ratio of the ETF.", default=None
    )
    management_fee: Optional[float] = Field(
        description="The management fee of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    mer: Optional[float] = Field(
        description="The management expense ratio of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    distribution_yield: Optional[float] = Field(
        description="The distribution yield of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    dividend_frequency: Optional[str] = Field(
        description="The dividend payment frequency of the ETF.", default=None
    )
    website: Optional[str] = Field(description="The website of the ETF.", default=None)
    description: Optional[str] = Field(
        description="The description of the ETF.",
        default=None,
    )

    @field_validator(
        "distribution_yield",
        "return_1m",
        "return_3m",
        "return_6m",
        "return_ytd",
        "return_1y",
        "return_3y",
        "return_5y",
        "return_10y",
        "return_from_inception",
        "mer",
        "management_fee",
        mode="before",
        check_fields=False,
    )
    @classmethod
    def normalize_percent(cls, v):
        """Return percents as normalized percentage points.

        BUG FIX: the previous `if v` check also treated a legitimate 0
        (a flat 0% return) as missing and erased it to None; only true
        missing values (None or empty string) map to None now.
        """
        if v is None or v == "":
            return None
        return float(v) / 100
|
| 156 |
+
|
| 157 |
+
|
| 158 |
+
class TmxEtfInfoFetcher(
    Fetcher[
        TmxEtfInfoQueryParams,
        List[TmxEtfInfoData],
    ]
):
    """TMX ETF Info Fetcher."""

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxEtfInfoQueryParams:
        """Transform the query."""
        return TmxEtfInfoQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxEtfInfoQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> List[Dict]:
        """Return the raw data from the TMX endpoint.

        Filters the daily all-ETFs JSON down to the requested symbols and
        the columns that map onto the model fields.
        """
        # pylint: disable=import-outside-toplevel
        from openbb_tmx.utils.helpers import get_all_etfs
        from pandas import DataFrame

        results: List[Dict] = []
        # BUG FIX: the original only upper-cased the symbol in the
        # single-symbol branch; comma-separated lists were compared
        # case-sensitively against the upper-case TMX symbols and could
        # silently return no rows. Normalize every entry the same way.
        symbols = [s.strip().upper() for s in query.symbol.split(",")]
        _data = DataFrame(await get_all_etfs(use_cache=query.use_cache))
        COLUMNS = [
            "symbol",
            "inception_date",
            "name",
            "fund_family",
            "investment_style",
            "esg",
            "currency",
            "unit_price",
            "close",
            "prev_close",
            "return_1m",
            "return_3m",
            "return_6m",
            "return_ytd",
            "return_1y",
            "return_3y",
            "return_5y",
            "return_from_inception",
            "volume_avg_daily",
            "volume_avg_30d",
            "aum",
            "pe_ratio",
            "pb_ratio",
            "management_fee",
            "mer",
            "distribution_yield",
            "dividend_frequency",
            "website",
            "investment_objectives",
        ]

        for symbol in symbols:
            # Strip composite-exchange suffixes and convert share-class
            # dashes to the dot notation used by the TMX feed.
            symbol = (  # noqa: PLW2901
                symbol.replace(".TO", "").replace(".TSX", "").replace("-", ".")
            )
            target = _data[_data["symbol"] == symbol][COLUMNS]
            target = target.fillna("N/A").replace("N/A", None)
            if len(target) > 0:
                # One row per symbol -> a single dict of column: value.
                results.append(
                    target.reset_index(drop=True).transpose().to_dict()[0]
                )
        return results

    @staticmethod
    def transform_data(
        query: TmxEtfInfoQueryParams,
        data: List[Dict],
        **kwargs: Any,
    ) -> List[TmxEtfInfoData]:
        """Return the transformed data."""
        return [TmxEtfInfoData.model_validate(d) for d in data]
|
openbb_platform/providers/tmx/openbb_tmx/models/etf_search.py
ADDED
|
@@ -0,0 +1,270 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX ETF Search fetcher."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from typing import Any, Dict, List, Literal, Optional
|
| 6 |
+
|
| 7 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 8 |
+
from openbb_core.provider.standard_models.etf_search import (
|
| 9 |
+
EtfSearchData,
|
| 10 |
+
EtfSearchQueryParams,
|
| 11 |
+
)
|
| 12 |
+
from pydantic import Field, field_validator
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class TmxEtfSearchQueryParams(EtfSearchQueryParams):
    """TMX ETF Search query.

    Source: https://www.tmx.com/
    """

    # Optional filter on the dividend payment schedule.
    div_freq: Optional[Literal["monthly", "annually", "quarterly"]] = Field(
        default=None,
        description="The dividend payment frequency.",
    )

    # Optional column to sort the result set by (descending).
    sort_by: Optional[
        Literal[
            "aum",
            "return_1m",
            "return_3m",
            "return_6m",
            "return_1y",
            "return_3y",
            "return_ytd",
            "beta_1y",
            "volume_avg_daily",
            "management_fee",
            "distribution_yield",
            "pb_ratio",
            "pe_ratio",
        ]
    ] = Field(default=None, description="The column to sort by.")

    # The upstream source is one large JSON file refreshed daily.
    use_cache: bool = Field(
        default=True,
        description=(
            "Whether to use a cached request. All ETF data comes from a single JSON file that is updated daily."
            " To bypass, set to False. If True, the data will be cached for 4 hours."
        ),
    )
|
| 48 |
+
|
| 49 |
+
|
| 50 |
+
class TmxEtfSearchData(EtfSearchData):
    """TMX ETF Search Data.

    Maps the TMX ETF JSON feed onto the standard ETF search model;
    percent figures arrive as percentage points and are normalized.
    """

    # Map model field names to the keys used by the TMX JSON feed.
    __alias_dict__ = {
        "issuer": "fund_family",
        "avg_volume": "volume_avg_daily",
        "avg_volume_30d": "volume_avg_30d",
    }

    short_name: Optional[str] = Field(
        description="The short name of the ETF.", default=None
    )
    inception_date: Optional[str] = Field(
        description="The inception date of the ETF.", default=None
    )
    issuer: Optional[str] = Field(description="The issuer of the ETF.", default=None)
    investment_style: Optional[str] = Field(
        description="The investment style of the ETF.", default=None
    )
    esg: Optional[bool] = Field(
        description="Whether the ETF qualifies as an ESG fund.", default=None
    )
    # BUG FIX: `currency` and `close` previously omitted `default=None`,
    # making them required — inconsistent with every sibling field and a
    # validation failure whenever the feed lacks either value.
    currency: Optional[str] = Field(
        description="The currency of the ETF.", default=None
    )
    unit_price: Optional[float] = Field(
        description="The unit price of the ETF.", default=None
    )
    close: Optional[float] = Field(
        description="The closing price of the ETF.", default=None
    )
    prev_close: Optional[float] = Field(
        description="The previous closing price of the ETF.", default=None
    )
    return_1m: Optional[float] = Field(
        description="The one-month return of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    return_3m: Optional[float] = Field(
        description="The three-month return of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    return_6m: Optional[float] = Field(
        description="The six-month return of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    return_ytd: Optional[float] = Field(
        description="The year-to-date return of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    return_1y: Optional[float] = Field(
        description="The one-year return of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    beta_1y: Optional[float] = Field(
        description="The one-year beta of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    return_3y: Optional[float] = Field(
        description="The three-year return of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    beta_3y: Optional[float] = Field(
        description="The three-year beta of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    return_5y: Optional[float] = Field(
        description="The five-year return of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    beta_5y: Optional[float] = Field(
        description="The five-year beta of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    return_10y: Optional[float] = Field(
        description="The ten-year return of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    beta_10y: Optional[float] = Field(
        description="The ten-year beta of the ETF.", default=None
    )
    beta_15y: Optional[float] = Field(
        description="The fifteen-year beta of the ETF.", default=None
    )
    return_from_inception: Optional[float] = Field(
        description="The return from inception of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    avg_volume: Optional[int] = Field(
        description="The average daily volume of the ETF.",
        default=None,
    )
    avg_volume_30d: Optional[int] = Field(
        description="The 30-day average volume of the ETF.",
        default=None,
    )
    aum: Optional[float] = Field(description="The AUM of the ETF.", default=None)
    pe_ratio: Optional[float] = Field(
        description="The price-to-earnings ratio of the ETF.", default=None
    )
    pb_ratio: Optional[float] = Field(
        description="The price-to-book ratio of the ETF.", default=None
    )
    management_fee: Optional[float] = Field(
        description="The management fee of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    mer: Optional[float] = Field(
        description="The management expense ratio of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    distribution_yield: Optional[float] = Field(
        description="The distribution yield of the ETF, as a normalized percent.",
        default=None,
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    dividend_frequency: Optional[str] = Field(
        description="The dividend payment frequency of the ETF.", default=None
    )

    # NOTE(review): the beta_* fields are not in this list — presumably
    # the feed already delivers them un-scaled; confirm before adding.
    @field_validator(
        "distribution_yield",
        "return_1m",
        "return_3m",
        "return_6m",
        "return_ytd",
        "return_1y",
        "return_3y",
        "return_5y",
        "return_10y",
        "return_from_inception",
        "mer",
        "management_fee",
        mode="before",
        check_fields=False,
    )
    @classmethod
    def normalize_percent(cls, v):
        """Return percents as normalized percentage points.

        BUG FIX: the previous `if v` check also treated a legitimate 0
        (a flat 0% figure) as missing and erased it to None; only true
        missing values (None or empty string) map to None now.
        """
        if v is None or v == "":
            return None
        return float(v) / 100
|
| 202 |
+
|
| 203 |
+
|
| 204 |
+
class TmxEtfSearchFetcher(
    Fetcher[
        TmxEtfSearchQueryParams,
        List[TmxEtfSearchData],
    ]
):
    """Transform the query, extract and transform the data from the TMX endpoints."""

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxEtfSearchQueryParams:
        """Transform the query."""
        return TmxEtfSearchQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxEtfSearchQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> List[Dict]:
        """Return the raw data from the TMX endpoint.

        Applies the free-text query, dividend-frequency filter, and
        optional sort to the daily all-ETFs JSON.
        """
        # pylint: disable=import-outside-toplevel
        from openbb_tmx.utils.helpers import get_all_etfs
        from pandas import DataFrame

        etfs = DataFrame(await get_all_etfs(use_cache=query.use_cache))

        if query.query:
            # BUG FIX: `na=False` added — without it, missing values in any
            # searched column yield NA in the boolean mask, which raises on
            # boolean indexing with modern pandas instead of filtering.
            etfs = etfs[
                etfs["name"].str.contains(query.query, case=False, na=False)
                | etfs["short_name"].str.contains(query.query, case=False, na=False)
                | etfs["investment_style"].str.contains(
                    query.query, case=False, na=False
                )
                | etfs["investment_objectives"].str.contains(
                    query.query, case=False, na=False
                )
                | etfs["symbol"].str.contains(query.query, case=False, na=False)
            ]

        data = etfs.copy()

        if query.div_freq:
            # The feed stores the frequency capitalized ("Monthly", etc.).
            data = data[data["dividend_frequency"] == query.div_freq.capitalize()]

        if query.sort_by:
            data = data.sort_values(by=query.sort_by, ascending=False)

        # Drop nested/verbose columns that the search model does not carry.
        data.drop(
            columns=[
                "sectors",
                "regions",
                "holdings_top10_summary",
                "holdings_top10",
                "additional_data",
                "website",
                "asset_class_id",
                "investment_objectives",
            ],
            inplace=True,
        )
        data = data.dropna(how="all")
        return data.fillna("N/A").replace("N/A", None).to_dict("records")

    @staticmethod
    def transform_data(
        query: TmxEtfSearchQueryParams,
        data: List[Dict],
        **kwargs: Any,
    ) -> List[TmxEtfSearchData]:
        """Transform the data to the standard format."""
        return [TmxEtfSearchData.model_validate(d) for d in data]
|
openbb_platform/providers/tmx/openbb_tmx/models/etf_sectors.py
ADDED
|
@@ -0,0 +1,95 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX ETF Sectors fetcher."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from typing import Any, Dict, List, Optional
|
| 6 |
+
|
| 7 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 8 |
+
from openbb_core.provider.standard_models.etf_sectors import (
|
| 9 |
+
EtfSectorsData,
|
| 10 |
+
EtfSectorsQueryParams,
|
| 11 |
+
)
|
| 12 |
+
from pydantic import Field
|
| 13 |
+
|
| 14 |
+
|
| 15 |
+
class TmxEtfSectorsQueryParams(EtfSectorsQueryParams):
    """Query parameters for the TMX ETF Sectors fetcher."""

    # The upstream source is one large JSON file refreshed daily, so a
    # short-lived cache avoids re-downloading it on every call.
    use_cache: bool = Field(
        default=True,
        description=(
            "Whether to use a cached request. All ETF data comes from a single JSON file that is updated daily."
            " To bypass, set to False. If True, the data will be cached for 4 hours."
        ),
    )
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class TmxEtfSectorsData(EtfSectorsData):
    """TMX ETF Sectors Data.

    No provider-specific fields; the standard model is used as-is.
    """
|
| 27 |
+
|
| 28 |
+
|
| 29 |
+
class TmxEtfSectorsFetcher(
    Fetcher[
        TmxEtfSectorsQueryParams,
        List[TmxEtfSectorsData],
    ]
):
    """TMX ETF Sectors Fetcher."""

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxEtfSectorsQueryParams:
        """Transform the query."""
        return TmxEtfSectorsQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxEtfSectorsQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> List[Dict]:
        """Return the raw data from the TMX endpoint.

        Looks the symbol up in the daily all-ETFs JSON and returns its
        nested sector-weight records.
        """
        # pylint: disable=import-outside-toplevel
        from openbb_core.provider.utils.errors import EmptyDataError  # noqa
        from openbb_tmx.utils.helpers import get_all_etfs
        from pandas import DataFrame

        all_etfs = DataFrame(await get_all_etfs(use_cache=query.use_cache))
        # Normalize share-class dashes and strip composite-exchange suffixes.
        ticker = (
            query.symbol.upper()
            .replace("-", ".")
            .replace(".TO", "")
            .replace(".TSX", "")
        )
        sectors_column = all_etfs[all_etfs["symbol"] == ticker]["sectors"]

        if len(sectors_column) == 0:
            raise EmptyDataError(
                f"No sectors info found for ETF symbol: {query.symbol}."
            )

        sector_frame = DataFrame.from_records(sectors_column.iloc[0]).rename(
            columns={"name": "sector", "percent": "weight"}
        )
        return sector_frame.to_dict(orient="records")

    @staticmethod
    def transform_data(
        query: TmxEtfSectorsQueryParams,
        data: List[Dict],
        **kwargs: Any,
    ) -> List[TmxEtfSectorsData]:
        """Return the transformed data."""
        # pylint: disable=import-outside-toplevel
        from openbb_core.provider.utils.errors import EmptyDataError  # noqa
        from numpy import nan
        from pandas import DataFrame

        sector_frame = DataFrame(data)
        if "weight" not in sector_frame.columns:
            raise EmptyDataError(
                f"No sectors info found for ETF symbol: {query.symbol}."
            )
        # Weights arrive as percentage points; convert to fractions.
        sector_frame["weight"] = sector_frame["weight"] / 100

        sector_frame = sector_frame.replace({nan: None})
        records = sector_frame.to_dict(orient="records")
        return [TmxEtfSectorsData.model_validate(record) for record in records]
|
openbb_platform/providers/tmx/openbb_tmx/models/gainers.py
ADDED
|
@@ -0,0 +1,174 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX Equity Gainers Model."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from typing import Any, Dict, List, Literal, Optional
|
| 6 |
+
|
| 7 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 8 |
+
from openbb_core.provider.standard_models.equity_performance import (
|
| 9 |
+
EquityPerformanceData,
|
| 10 |
+
EquityPerformanceQueryParams,
|
| 11 |
+
)
|
| 12 |
+
from openbb_core.provider.utils.errors import EmptyDataError
|
| 13 |
+
from pydantic import Field, field_validator, model_validator
|
| 14 |
+
|
| 15 |
+
# Mapping of user-facing category names to the stock-list identifiers
# accepted by the TMX GraphQL API.
STOCK_LISTS_DICT = {
    "dividend": "TOP_DIVIDEND",
    "energy": "TOP_ENERGY",
    "healthcare": "TOP_HEALTHCARE",
    "industrials": "TOP_INDUSTRIALS",
    "price_performer": "TOP_PRICE_PERFORMER",
    "rising_stars": "RISING_STARS",
    "real_estate": "TOP_REAL_ESTATE",
    "tech": "TOP_TECH",
    "utilities": "TOP_UTILITIES",
    "52w_high": "TOP_WEEK_52_HIGH",
    "volume": "TOP_VOLUME",
}

# Closed set of valid category names (keep in sync with STOCK_LISTS_DICT).
STOCK_LISTS = Literal[
    "dividend",
    "energy",
    "healthcare",
    "industrials",
    "price_performer",
    "rising_stars",
    "real_estate",
    "tech",
    "utilities",
    "52w_high",
    "volume",
]
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
class TmxGainersQueryParams(EquityPerformanceQueryParams):
    """Query parameters for the TMX Gainers fetcher."""

    # Advertise the valid category choices in the generated schema.
    __json_schema_extra__ = {
        "category": {
            "multiple_items_allowed": False,
            "choices": list(STOCK_LISTS_DICT),
        },
    }

    category: STOCK_LISTS = Field(
        description="The category of list to retrieve. Defaults to `price_performer`.",
        default="price_performer",
    )
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
class TmxGainersData(EquityPerformanceData):
    """TMX Gainers Data.

    Maps a TMX stock-list row onto the standard equity performance model.
    """

    # Map model field names to the keys used by the TMX GraphQL response.
    __alias_dict__ = {
        "name": "longName",
        "change": "priceChange",
        "percent_change": "percentChange",
        "thirty_day_price_change": "30 Day Price Change",
        "dividend_yield": "Dividend Yield",
        "year_high": "52 Week High",
        "avg_volume_10d": "10 Day Avg. Volume",
        "ninety_day_price_change": "90 Day Price Change",
    }
    thirty_day_price_change: Optional[float] = Field(
        default=None,
        description="30 Day Price Change.",
        json_schema_extra={"x-unit_measurement": "currency"},
    )
    ninety_day_price_change: Optional[float] = Field(
        default=None,
        description="90 Day Price Change.",
        json_schema_extra={"x-unit_measurement": "currency"},
    )
    dividend_yield: Optional[float] = Field(
        default=None,
        description="Dividend Yield.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    avg_volume_10d: Optional[float] = Field(
        default=None,
        description="10 Day Avg. Volume.",
    )
    rank: int = Field(description="The rank of the stock in the list.")

    @field_validator("percent_change", mode="after", check_fields=False)
    @classmethod
    def normalize_percent(cls, v):
        """Return percents as normalized percentage points."""
        return float(v) / 100 if v else 0

    @model_validator(mode="before")
    @classmethod
    def check_metric(cls, values):
        """Replace missing-value placeholders and scale the dividend yield.

        The TMX feed uses "-" as a placeholder for a missing figure.
        """
        for k, v in values.items():
            if v is None or v == "-":
                # BUG FIX: the local `v` must be zeroed along with the
                # stored value — previously the branch below called
                # float("-") on the raw placeholder and raised ValueError.
                v = 0
                values[k] = 0
            if k == "Dividend Yield":
                values[k] = float(v) / 100 if v else None
        return values
|
| 110 |
+
|
| 111 |
+
|
| 112 |
+
class TmxGainersFetcher(
    Fetcher[
        TmxGainersQueryParams,
        List[TmxGainersData],
    ]
):
    """TMX Gainers Fetcher."""

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxGainersQueryParams:
        """Transform the query."""
        return TmxGainersQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxGainersQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> List[Dict]:
        # BUG FIX: the return annotation previously claimed
        # List[TmxGainersData] although this method returns raw dicts
        # (transform_data below performs the model validation).
        """Return the raw data from the TMX GraphQL endpoint."""
        # pylint: disable=import-outside-toplevel
        import json  # noqa
        from openbb_tmx.utils import gql  # noqa
        from openbb_tmx.utils.helpers import get_data_from_gql, get_random_agent  # noqa

        user_agent = get_random_agent()
        payload = gql.get_stock_list_payload.copy()
        payload["variables"]["stockListId"] = STOCK_LISTS_DICT[query.category]

        url = "https://app-money.tmx.com/graphql"
        response = await get_data_from_gql(
            method="POST",
            url=url,
            data=json.dumps(payload),
            headers={
                "authority": "app-money.tmx.com",
                "referer": "https://money.tmx.com",
                "locale": "en",
                "Content-Type": "application/json",
                "User-Agent": user_agent,
                "Accept": "*/*",
            },
            timeout=5,
        )
        if "errors" in response:
            raise EmptyDataError()
        # BUG FIX: guard against a missing/null "listItems" key — the
        # original iterated len(None) and raised TypeError.
        results = response["data"]["stockList"].get("listItems") or []
        metric = response["data"]["stockList"].get("metricTitle")
        # Re-key the generic "metric" column under its display title so
        # the model aliases (e.g. "Dividend Yield") can pick it up.
        for item in results:
            if "metric" in item:
                item[metric] = item.pop("metric")

        return results

    @staticmethod
    def transform_data(
        query: TmxGainersQueryParams,
        data: List[Dict],
        **kwargs: Any,
    ) -> List[TmxGainersData]:
        """Transform the data to the model."""
        return [TmxGainersData.model_validate(d) for d in data]
|
openbb_platform/providers/tmx/openbb_tmx/models/historical_dividends.py
ADDED
|
@@ -0,0 +1,111 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX Stock Dividends Model"""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from datetime import date as dateType
|
| 6 |
+
from typing import Any, Dict, List, Optional
|
| 7 |
+
|
| 8 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 9 |
+
from openbb_core.provider.standard_models.historical_dividends import (
|
| 10 |
+
HistoricalDividendsData,
|
| 11 |
+
HistoricalDividendsQueryParams,
|
| 12 |
+
)
|
| 13 |
+
from pydantic import Field
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class TmxHistoricalDividendsQueryParams(HistoricalDividendsQueryParams):
    """TMX Historical Dividends Query Params.

    No provider-specific parameters; inherits the standard fields.
    """
|
| 18 |
+
|
| 19 |
+
|
| 20 |
+
class TmxHistoricalDividendsData(HistoricalDividendsData):
    """TMX Historical Dividends Data."""

    # Maps standard field names to keys in the TMX GraphQL payload.
    __alias_dict__ = {
        "ex_dividend_date": "exDate",
        "record_date": "recordDate",
        "payment_date": "payableDate",
        "declaration_date": "declarationDate",
    }
    currency: Optional[str] = Field(
        default=None, description="The currency the dividend is paid in."
    )
    # Fixed: the field was misspelled "decalaration_date", so the
    # "declaration_date" alias above never mapped onto it and the
    # announcement date was silently dropped.
    declaration_date: Optional[dateType] = Field(
        default=None, description="The date of the announcement."
    )
    record_date: Optional[dateType] = Field(
        default=None,
        description="The record date of ownership for rights to the dividend.",
    )
    payment_date: Optional[dateType] = Field(
        default=None, description="The date the dividend is paid."
    )
|
| 42 |
+
|
| 43 |
+
|
| 44 |
+
class TmxHistoricalDividendsFetcher(
    Fetcher[TmxHistoricalDividendsQueryParams, List[TmxHistoricalDividendsData]]
):
    """TMX Historical Dividends Fetcher.

    Retrieves the dividend history for a single symbol from the TMX
    GraphQL endpoint and sorts it by ex-dividend date, ascending.
    """

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxHistoricalDividendsQueryParams:
        """Transform the query."""
        return TmxHistoricalDividendsQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxHistoricalDividendsQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> List[Dict]:
        """Return the raw data from the TMX endpoint.

        Raises
        ------
        RuntimeError
            If the response payload cannot be unpacked as expected.
        """
        # pylint: disable=import-outside-toplevel
        import json  # noqa
        from openbb_tmx.utils import gql  # noqa
        from openbb_tmx.utils.helpers import get_data_from_gql, get_random_agent  # noqa

        user_agent = get_random_agent()
        # Normalize to TMX symbology: class shares use "." instead of "-",
        # and any Canadian exchange suffix is stripped.
        symbol = (
            query.symbol.upper()
            .replace("-", ".")
            .replace(".TO", "")
            .replace(".TSX", "")
        )
        data = []
        payload = gql.historical_dividends_payload.copy()
        payload["variables"]["symbol"] = symbol
        # Request up to 500 records in a single page.
        payload["variables"]["batch"] = 500
        payload["variables"]["page"] = 1

        url = "https://app-money.tmx.com/graphql"
        response = await get_data_from_gql(
            method="POST",
            url=url,
            data=json.dumps(payload),
            headers={
                "authority": "app-money.tmx.com",
                "referer": f"https://money.tmx.com/en/quote/{symbol}",
                "locale": "en",
                "Content-Type": "application/json",
                "User-Agent": user_agent,
                "Accept": "*/*",
            },
            timeout=5,
        )
        try:
            if response.get("data", {}).get("dividends"):  # type: ignore
                data = response["data"]["dividends"]  # type: ignore
                # NOTE(review): assumes the "dividends" object itself nests a
                # "dividends" list — the double lookup is deliberate here, but
                # only this line evidences the payload shape; confirm against
                # the GraphQL schema.
                data = sorted(data["dividends"], key=lambda d: d["exDate"])  # type: ignore

        except Exception as e:
            raise RuntimeError(e) from e

        return data

    @staticmethod
    def transform_data(
        query: TmxHistoricalDividendsQueryParams,
        data: List[Dict],
        **kwargs: Any,
    ) -> List[TmxHistoricalDividendsData]:
        """Return the transformed data."""
        return [TmxHistoricalDividendsData.model_validate(d) for d in data]
|
openbb_platform/providers/tmx/openbb_tmx/models/index_constituents.py
ADDED
|
@@ -0,0 +1,98 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX Index Constituents Model."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from typing import Any, Dict, List, Optional
|
| 6 |
+
|
| 7 |
+
from openbb_core.app.model.abstract.error import OpenBBError
|
| 8 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 9 |
+
from openbb_core.provider.standard_models.index_constituents import (
|
| 10 |
+
IndexConstituentsData,
|
| 11 |
+
IndexConstituentsQueryParams,
|
| 12 |
+
)
|
| 13 |
+
from openbb_core.provider.utils.errors import EmptyDataError
|
| 14 |
+
from pydantic import Field, field_validator
|
| 15 |
+
|
| 16 |
+
|
| 17 |
+
class TmxIndexConstituentsQueryParams(IndexConstituentsQueryParams):
    """TMX Index Constituents Query Params."""

    # The source is one daily JSON snapshot; caching avoids re-downloading
    # it within the same day.
    use_cache: bool = Field(
        default=True,
        description="Whether to use a cached request."
        + " Index data is from a single JSON file, updated each day after close."
        + " It is cached for one day. To bypass, set to False.",
    )
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class TmxIndexConstituentsData(IndexConstituentsData):
    """TMX Index Constituents Data."""

    # Maps the standard field name onto the TMX JSON key.
    __alias_dict__ = {
        "market_value": "quotedmarketvalue",
    }

    market_value: Optional[float] = Field(
        default=None,
        description="The quoted market value of the asset.",
    )

    @field_validator("weight", mode="before", check_fields=False)
    @classmethod
    def normalize_percent(cls, v):
        """Convert a percentage-point weight into a normalized fraction.

        Falsy inputs (None, 0, empty string) become None.
        """
        if not v:
            return None
        return float(v) / 100
|
| 45 |
+
|
| 46 |
+
|
| 47 |
+
class TmxIndexConstituentsFetcher(
    Fetcher[
        TmxIndexConstituentsQueryParams,
        List[TmxIndexConstituentsData],
    ]
):
    """TMX Index Constituents Fetcher.

    Reads the daily S&P/TSX indices JSON snapshot and extracts the
    constituent list for the requested index symbol.
    """

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxIndexConstituentsQueryParams:
        """Transform the query."""
        return TmxIndexConstituentsQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxIndexConstituentsQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> Dict:
        """Return the raw data from the TMX endpoint."""
        # pylint: disable=import-outside-toplevel
        from openbb_tmx.utils.helpers import get_data_from_url, get_indices_backend

        url = "https://tmxinfoservices.com/files/indices/sptsx-indices.json"

        data = await get_data_from_url(
            url,
            use_cache=query.use_cache,
            backend=get_indices_backend(),
        )

        return data

    @staticmethod
    def transform_data(
        query: TmxIndexConstituentsQueryParams, data: Dict, **kwargs
    ) -> List[TmxIndexConstituentsData]:
        """Return the transformed data.

        Raises
        ------
        EmptyDataError
            If the snapshot is empty.
        OpenBBError
            If the symbol is unknown or the index has no constituents.
        """
        data = data.copy()
        if not data:
            raise EmptyDataError
        # Default to {} so a payload missing the "indices" key raises the
        # informative OpenBBError below instead of `TypeError: argument of
        # type 'NoneType' is not iterable`.
        if query.symbol not in data.get("indices", {}):
            raise OpenBBError(f"Index {query.symbol} was not found. Check the symbol.")
        index_data = data["indices"][query.symbol]
        if (
            index_data.get("nb_constituents") == 0
            or index_data.get("constituents") is None
        ):
            raise OpenBBError(f"No constituents found for index, {query.symbol}")
        results = index_data["constituents"]
        return [TmxIndexConstituentsData.model_validate(d) for d in results]
|
openbb_platform/providers/tmx/openbb_tmx/models/index_sectors.py
ADDED
|
@@ -0,0 +1,92 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX Index Sectors fetcher."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from typing import Any, Dict, List, Optional
|
| 6 |
+
|
| 7 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 8 |
+
from openbb_core.provider.standard_models.index_sectors import (
|
| 9 |
+
IndexSectorsData,
|
| 10 |
+
IndexSectorsQueryParams,
|
| 11 |
+
)
|
| 12 |
+
from openbb_core.provider.utils.errors import EmptyDataError
|
| 13 |
+
from pydantic import Field, field_validator
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class TmxIndexSectorsQueryParams(IndexSectorsQueryParams):
    """TMX Index Sectors Query Params."""

    # The source is one daily JSON snapshot; caching avoids re-downloading
    # it within the same day.
    use_cache: bool = Field(
        default=True,
        description="Whether to use a cached request. All Index data comes from a single JSON file that is updated daily."
        + " To bypass, set to False. If True, the data will be cached for 1 day.",
    )
|
| 24 |
+
|
| 25 |
+
|
| 26 |
+
class TmxIndexSectorsData(IndexSectorsData):
    """TMX Index Sectors Data."""

    @field_validator("weight", mode="after", check_fields=False)
    @classmethod
    def normalize_percent(cls, v):
        """Convert a percentage-point weight into a normalized fraction.

        Falsy inputs (None, 0, empty string) become None.
        """
        if not v:
            return None
        return float(v) / 100
|
| 34 |
+
|
| 35 |
+
|
| 36 |
+
class TmxIndexSectorsFetcher(
    Fetcher[
        TmxIndexSectorsQueryParams,
        List[TmxIndexSectorsData],
    ]
):
    """Transform the query, extract and transform the data from the TMX endpoints."""

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxIndexSectorsQueryParams:
        """Transform the query."""
        return TmxIndexSectorsQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxIndexSectorsQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> Dict:
        """Return the raw data from the TMX endpoint."""
        # pylint: disable=import-outside-toplevel
        from openbb_tmx.utils.helpers import get_data_from_url, get_indices_backend

        url = "https://tmxinfoservices.com/files/indices/sptsx-indices.json"

        data = await get_data_from_url(
            url,
            use_cache=query.use_cache,
            backend=get_indices_backend(),
        )

        return data

    @staticmethod
    def transform_data(
        query: TmxIndexSectorsQueryParams, data: Dict, **kwargs: Any
    ) -> List[TmxIndexSectorsData]:
        """Return the transformed data.

        Returns an empty result when the symbol is not present or carries
        no sector breakdown.
        """
        results = []
        data = data.copy()
        if data == {}:
            raise EmptyDataError
        if (
            query.symbol in data["indices"]
            and "sectors" in data["indices"][query.symbol]
        ):
            # Default to an empty list: the previous `if temp is not None`
            # guard sat INSIDE the comprehension, so iterating a None value
            # raised TypeError before the guard was ever evaluated.
            sectors = data["indices"][query.symbol].get("sectors") or []
            results = [
                {
                    "sector": d.get("name"),
                    "weight": d.get("weight"),
                }
                for d in sectors
            ]

        return [TmxIndexSectorsData.model_validate(d) for d in results]
|
openbb_platform/providers/tmx/openbb_tmx/models/index_snapshots.py
ADDED
|
@@ -0,0 +1,288 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX Index Snapshots Model."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from typing import Any, Dict, List, Literal, Optional
|
| 6 |
+
|
| 7 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 8 |
+
from openbb_core.provider.standard_models.index_snapshots import (
|
| 9 |
+
IndexSnapshotsData,
|
| 10 |
+
IndexSnapshotsQueryParams,
|
| 11 |
+
)
|
| 12 |
+
from openbb_core.provider.utils.errors import EmptyDataError
|
| 13 |
+
from pydantic import Field, field_validator
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class TmxIndexSnapshotsQueryParams(IndexSnapshotsQueryParams):
    """TMX Index Snapshots Query Params."""

    # Description added for consistency with the other documented params.
    region: Optional[Literal["ca", "us"]] = Field(  # type: ignore
        default="ca",
        description="The region of focus for the data - i.e., us, ca.",
    )
    use_cache: bool = Field(
        default=True,
        description="Whether to use a cached request."
        + " Index data is from a single JSON file, updated each day after close."
        + " It is cached for one day. To bypass, set to False.",
    )
|
| 26 |
+
|
| 27 |
+
|
| 28 |
+
class TmxIndexSnapshotsData(IndexSnapshotsData):
    """TMX Index Snapshots Data."""

    # Maps standard field names to keys in the TMX payloads (both the daily
    # JSON snapshot and the GraphQL quote response).
    __alias_dict__ = {
        "name": "longname",
        "prev_close": "prevClose",
        "change": "priceChange",
        "change_percent": "previousday",
        "year_high": "weeks52high",
        "year_low": "weeks52low",
        "return_mtd": "monthtodate",
        "return_qtd": "quartertodate",
        "return_ytd": "yeartodate",
        "total_market_value": "total",
        "constituent_average_market_value": "average",
        "constituent_median_market_value": "median",
        "constituent_top10_market_value": "sumtop10",
        "constituent_largest_market_value": "largest",
        "constituent_largest_weight": "largestweight",
        "constituent_smallest_market_value": "smallest",
        "constituent_smallest_weight": "smallestweight",
    }
    year_high: Optional[float] = Field(
        default=None, description="The 52-week high of the index."
    )
    year_low: Optional[float] = Field(
        default=None, description="The 52-week low of the index."
    )
    return_mtd: Optional[float] = Field(
        default=None,
        description="The month-to-date return of the index, as a normalized percent.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    return_qtd: Optional[float] = Field(
        default=None,
        description="The quarter-to-date return of the index, as a normalized percent.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    return_ytd: Optional[float] = Field(
        default=None,
        description="The year-to-date return of the index, as a normalized percent.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    total_market_value: Optional[float] = Field(
        default=None,
        description="The total quoted market value of the index.",
    )
    number_of_constituents: Optional[int] = Field(
        default=None,
        description="The number of constituents in the index.",
    )
    constituent_average_market_value: Optional[float] = Field(
        default=None,
        description="The average quoted market value of the index constituents.",
    )
    constituent_median_market_value: Optional[float] = Field(
        default=None,
        description="The median quoted market value of the index constituents.",
    )
    constituent_top10_market_value: Optional[float] = Field(
        default=None,
        description="The sum of the top 10 quoted market values of the index constituents.",
    )
    constituent_largest_market_value: Optional[float] = Field(
        default=None,
        description="The largest quoted market value of the index constituents.",
    )
    constituent_largest_weight: Optional[float] = Field(
        default=None,
        description="The largest weight of the index constituents, as a normalized percent.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    constituent_smallest_market_value: Optional[float] = Field(
        default=None,
        description="The smallest quoted market value of the index constituents.",
    )
    constituent_smallest_weight: Optional[float] = Field(
        default=None,
        description="The smallest weight of the index constituents, as a normalized percent.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )

    # All percent-like inputs arrive in percentage points; store fractions.
    @field_validator(
        "return_mtd",
        "return_qtd",
        "return_ytd",
        "change_percent",
        "constituent_largest_weight",
        "constituent_smallest_weight",
        mode="before",
        check_fields=False,
    )
    @classmethod
    def normalize_percent(cls, v):
        """Return percents as normalized percentage points."""
        return round(float(v) / 100, 6) if v else None

    @field_validator(
        "year_high",
        "year_low",
        "price",
        "prev_close",
        "change",
        mode="before",
        check_fields=False,
    )
    @classmethod
    def round_floating(cls, v):
        """Round floating values to two decimals."""
        return round(float(v), 2) if v else None
|
| 138 |
+
|
| 139 |
+
|
| 140 |
+
class TmxIndexSnapshotsFetcher(
    Fetcher[
        TmxIndexSnapshotsQueryParams,
        List[TmxIndexSnapshotsData],
    ]
):
    """TMX Index Snapshots Fetcher.

    For region "ca": merges the daily S&P/TSX JSON snapshot with live
    levels from the GraphQL quote endpoint. For region "us": queries the
    GraphQL quote endpoint for the NASDAQ GIDs directly.
    """

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxIndexSnapshotsQueryParams:
        """Transform the query."""
        return TmxIndexSnapshotsQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxIndexSnapshotsQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> List[Dict]:
        """Return the raw data from the TMX endpoint."""
        # pylint: disable=import-outside-toplevel
        import json  # noqa
        from openbb_tmx.utils import gql  # noqa
        from openbb_tmx.utils.helpers import (  # noqa
            NASDAQ_GIDS,
            get_data_from_gql,
            get_data_from_url,
            get_random_agent,
            get_indices_backend,
        )

        url = "https://tmxinfoservices.com/files/indices/sptsx-indices.json"
        user_agent = get_random_agent()
        results = []
        if query.region == "ca":
            data = await get_data_from_url(
                url,
                use_cache=query.use_cache,
                backend=get_indices_backend(),
            )
            if not data:
                raise EmptyDataError
            symbols = []

            # Flatten each index entry: symbol + name + currency, plus the
            # nested "performance" and "quotedmarketvalue" sub-dicts.
            for symbol in data["indices"]:
                symbols.append(symbol)
                new_data = {}
                performance = data["indices"][symbol].get("performance", {})
                market_value = data["indices"][symbol].get("quotedmarketvalue", {})
                new_data.update(
                    {
                        "symbol": symbol,
                        "name": data["indices"][symbol].get("name_en", None),
                        # USD-denominated indices are flagged in the English name.
                        "currency": (
                            "USD"
                            if "(USD)" in data["indices"][symbol]["name_en"]
                            else "CAD"
                        ),
                        **performance,
                        **market_value,
                    }
                )
                results.append(new_data)

            # Get current levels for each index.

            payload = gql.get_quote_for_symbols_payload.copy()
            payload["variables"]["symbols"] = symbols

            url = "https://app-money.tmx.com/graphql"
            response = await get_data_from_gql(
                method="POST",
                url=url,
                data=json.dumps(payload),
                headers={
                    "authority": "app-money.tmx.com",
                    "referer": "https://money.tmx.com/en/quote/^TSX",
                    "locale": "en",
                    "Content-Type": "application/json",
                    "User-Agent": user_agent,
                    "Accept": "*/*",
                },
                timeout=5,
            )
            if response.get("data") and response["data"].get("getQuoteForSymbols"):
                quote_data = response["data"]["getQuoteForSymbols"]
                # NOTE(review): this loop iterates the snapshot dict, so `d`
                # is a top-level KEY string, not a record — str has no .pop,
                # and no key appears to contain "longname"/"percentChange",
                # making the branch dead. It likely should iterate quote_data
                # (whose records carry those keys). Confirm intent before
                # changing; transform_data also remaps percentChange.
                for d in data:
                    if "longname" in d:
                        d.pop("longname")
                    if "percentChange" in d:
                        d.pop("percentChange")
                # Left-join the live quote onto each snapshot row by symbol.
                merged_list = [
                    {
                        **d1,
                        **next(
                            (d2 for d2 in quote_data if d2["symbol"] == d1["symbol"]),
                            {},
                        ),
                    }
                    for d1 in results
                ]
                results = merged_list

        if query.region == "us":
            # US indices are fetched by GID with the ":US" suffix.
            symbols = [f"{symbol}:US" for symbol in NASDAQ_GIDS]
            payload = gql.get_quote_for_symbols_payload.copy()
            payload["variables"]["symbols"] = symbols

            url = "https://app-money.tmx.com/graphql"
            response = await get_data_from_gql(
                method="POST",
                url=url,
                data=json.dumps(payload),
                headers={
                    "authority": "app-money.tmx.com",
                    "referer": "https://money.tmx.com/en/quote/^TSX",
                    "locale": "en",
                    "Content-Type": "application/json",
                    "User-Agent": user_agent,
                    "Accept": "*/*",
                },
                timeout=5,
            )
            if response.get("data") and response["data"].get("getQuoteForSymbols"):
                results = response["data"]["getQuoteForSymbols"]
                for item in results:
                    item["change_percent"] = item.pop("percentChange")

        return results

    @staticmethod
    def transform_data(
        query: TmxIndexSnapshotsQueryParams,
        data: List[Dict],
        **kwargs: Any,
    ) -> List[TmxIndexSnapshotsData]:
        """Return the transformed data.

        Rows without a price are dropped; empty strings and zeros become
        None; the raw "percentChange" key is renamed to "change_percent".
        """
        return [
            TmxIndexSnapshotsData.model_validate(
                {
                    "change_percent" if k == "percentChange" else k: (
                        None if v in ["", 0] else v
                    )
                    for k, v in d.items()
                }
            )
            for d in data
            if "price" in d and d["price"] is not None
        ]
|
openbb_platform/providers/tmx/openbb_tmx/models/insider_trading.py
ADDED
|
@@ -0,0 +1,173 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX Insider Trading Model."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from typing import Any, Dict, List, Optional
|
| 6 |
+
|
| 7 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 8 |
+
from openbb_core.provider.standard_models.insider_trading import (
|
| 9 |
+
InsiderTradingData,
|
| 10 |
+
InsiderTradingQueryParams,
|
| 11 |
+
)
|
| 12 |
+
from openbb_core.provider.utils.errors import EmptyDataError
|
| 13 |
+
from pydantic import Field, field_validator
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class TmxInsiderTradingQueryParams(InsiderTradingQueryParams):
    """TMX Insider Trading Query Params."""

    # When True, the endpoint returns aggregated activity instead of
    # individual insider records.
    summary: bool = Field(
        default=False,
        description="Return a summary of the insider activity instead of the individuals.",
    )
|
| 23 |
+
|
| 24 |
+
|
| 25 |
+
class TmxInsiderTradingData(InsiderTradingData):
    """TMX Insider Trading Data."""

    period: str = Field(
        description="The period of the activity. Bucketed by three, six, and twelve months."
    )
    owner_name: Optional[str] = Field(
        default=None, description="The name of the insider."
    )
    acquisition_or_deposition: Optional[str] = Field(
        default=None, description="Whether the insider bought or sold the shares."
    )
    # Fixed description: this field counts trades, not shares — share counts
    # are carried by securities_transacted below.
    number_of_trades: Optional[int] = Field(
        default=None, description="The number of trades over the period."
    )
    securities_owned: Optional[int] = Field(
        default=None, description="The number of shares held by the insider."
    )
    trade_value: Optional[float] = Field(
        default=None, description="The value of the shares traded by the insider."
    )
    securities_transacted: Optional[int] = Field(
        default=None,
        description="The total number of shares traded by the insider over the period.",
    )
    securities_bought: Optional[int] = Field(
        default=None,
        description="The total number of shares bought by all insiders over the period.",
    )
    securities_sold: Optional[int] = Field(
        default=None,
        description="The total number of shares sold by all insiders over the period.",
    )
    net_activity: Optional[int] = Field(
        default=None,
        description="The total net activity by all insiders over the period.",
    )

    @field_validator("period", mode="before", check_fields=False)
    @classmethod
    def period_to_snake_case(cls, v):
        """Convert the period to snake case."""
        # pylint: disable=import-outside-toplevel
        from openbb_core.provider.utils.helpers import to_snake_case

        return to_snake_case(v) if v else None
|
| 71 |
+
|
| 72 |
+
|
| 73 |
+
class TmxInsiderTradingFetcher(
    Fetcher[
        TmxInsiderTradingQueryParams,
        List[TmxInsiderTradingData],
    ]
):
    """TMX Insider Trading Fetcher."""

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxInsiderTradingQueryParams:
        """Transform the query."""
        return TmxInsiderTradingQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxInsiderTradingQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> List[Dict]:
        """Return the raw data from the TMX endpoint.

        NOTE(review): despite the List[Dict] annotation, the value returned is
        the "getCompanyInsidersActivities" payload, which transform_data
        indexes by key ("insiderActivities", "activitySummary") — i.e. a
        mapping. The annotation is kept for Fetcher compatibility.

        Raises
        ------
        EmptyDataError
            If the GraphQL response contains no insider activity.
        """
        # pylint: disable=import-outside-toplevel
        import json  # noqa
        from openbb_tmx.utils import gql  # noqa
        from openbb_tmx.utils.helpers import get_data_from_gql, get_random_agent  # noqa

        results: List = []
        user_agent = get_random_agent()
        # Normalize Canadian ticker notation to the bare TMX symbol.
        # FIX: strip ".TSXV" before ".TSX" (consistent with the price-target
        # fetcher); otherwise "X.TSXV" would be mangled to "XV" by the ".TSX"
        # replacement alone.
        symbol = (
            query.symbol.upper()
            .replace("-", ".")
            .replace(".TO", "")
            .replace(".TSXV", "")
            .replace(".TSX", "")
        )
        payload = gql.get_company_insiders_payload.copy()
        # FIX: dict.copy() is shallow — assigning into the shared "variables"
        # sub-dict would permanently mutate the module-level payload template.
        # Rebuild "variables" as a fresh dict instead.
        payload["variables"] = {**payload.get("variables", {}), "symbol": symbol}

        url = "https://app-money.tmx.com/graphql"
        response = await get_data_from_gql(
            method="POST",
            url=url,
            data=json.dumps(payload),
            headers={
                "authority": "app-money.tmx.com",
                "referer": f"https://money.tmx.com/en/quote/{symbol}",
                "locale": "en",
                "Content-Type": "application/json",
                "User-Agent": user_agent,
                "Accept": "*/*",
            },
            timeout=5,
        )

        if response.get("data") and response["data"].get(  # type: ignore
            "getCompanyInsidersActivities"
        ):
            results = response["data"]["getCompanyInsidersActivities"]  # type: ignore

        if not results:
            raise EmptyDataError()

        return results

    @staticmethod
    def transform_data(
        query: TmxInsiderTradingQueryParams,
        data: List[Dict],
        **kwargs: Any,
    ) -> List[TmxInsiderTradingData]:
        """Flatten the raw activity payload into validated records.

        Returns the per-insider rows by default, or the per-period summary
        rows when ``query.summary`` is True.
        """
        data = data.copy()
        results = []
        # One row per (period, buy/sell, insider) transaction.
        flattened_insiders = []
        for activity in data["insiderActivities"]:  # type: ignore
            for transaction_type in ["buy", "sell"]:
                for transaction in activity[transaction_type]:
                    new_transaction = {
                        "period": activity["periodkey"],
                        "acquisition_or_disposition": transaction_type,
                        "owner_name": transaction["name"],
                        "number_of_trades": transaction["trades"],
                        "securities_transacted": transaction["shares"],
                        "securities_owned": transaction["sharesHeld"],
                        "trade_value": transaction["tradeValue"],
                    }
                    flattened_insiders.append(new_transaction)
        # One row per period aggregate.
        flattened_summary = []
        for activity in data["activitySummary"]:  # type: ignore
            new_activity = {
                "period": activity["periodkey"],
                "securities_bought": activity["buyShares"],
                "securities_sold": activity["soldShares"],
                "net_activity": activity["netActivity"],
                "securities_transacted": activity["totalShares"],
            }
            flattened_summary.append(new_activity)
        if query.summary is False and len(flattened_insiders) > 0:
            results = flattened_insiders
        elif query.summary is True and len(flattened_summary) > 0:
            results = flattened_summary

        return [TmxInsiderTradingData.model_validate(d) for d in results]
|
openbb_platform/providers/tmx/openbb_tmx/models/options_chains.py
ADDED
|
@@ -0,0 +1,115 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX Options Chains Model."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
from datetime import (
|
| 5 |
+
date as dateType,
|
| 6 |
+
datetime,
|
| 7 |
+
)
|
| 8 |
+
from typing import Any, Dict, List, Optional, Union
|
| 9 |
+
|
| 10 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 11 |
+
from openbb_core.provider.standard_models.options_chains import (
|
| 12 |
+
OptionsChainsData,
|
| 13 |
+
OptionsChainsQueryParams,
|
| 14 |
+
)
|
| 15 |
+
from openbb_core.provider.utils.descriptions import (
|
| 16 |
+
QUERY_DESCRIPTIONS,
|
| 17 |
+
)
|
| 18 |
+
from pydantic import Field, field_validator
|
| 19 |
+
|
| 20 |
+
|
| 21 |
+
class TmxOptionsChainsQueryParams(OptionsChainsQueryParams):
    """TMX Options Chains Query.

    Source: https://www.tmx.com/
    """

    # When set, the fetcher downloads the historical EOD chain for this date
    # instead of the current chain.
    date: Optional[dateType] = Field(
        description=QUERY_DESCRIPTIONS.get("date", ""),
        default=None,
    )
    use_cache: bool = Field(
        default=True,
        description="Caching is used to validate the supplied ticker symbol, or if a historical EOD chain is requested."
        + " To bypass, set to False.",
    )
+
|
| 38 |
+
class TmxOptionsChainsData(OptionsChainsData):
    """TMX Options Chains Data."""

    # Inherit the standard model's docstring verbatim.
    __doc__ = OptionsChainsData.__doc__

    # Columnar fields: each list is parallel to the other chain columns.
    transactions: List[Union[int, None]] = Field(
        default_factory=list, description="Number of transactions for the contract."
    )
    total_value: List[Union[float, None]] = Field(
        default_factory=list,
        description="Total value of the transactions.",
    )
    settlement_price: List[Union[float, None]] = Field(
        default_factory=list,
        description="Settlement price on that date.",
    )

    @field_validator("expiration", mode="before", check_fields=False)
    @classmethod
    def date_validate(cls, v):
        """Return the datetime object from the date string.

        NOTE(review): assumes `v` is an iterable of "%Y-%m-%d" strings;
        a value that is None or already parsed would raise here — confirm
        against the upstream chain format.
        """
        return [datetime.strptime(d, "%Y-%m-%d") for d in v]
|
| 62 |
+
class TmxOptionsChainsFetcher(
    Fetcher[
        TmxOptionsChainsQueryParams,
        TmxOptionsChainsData,
    ]
):
    """TMX Options Chains Fetcher."""

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxOptionsChainsQueryParams:
        """Transform the query."""
        return TmxOptionsChainsQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxOptionsChainsQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> Dict:
        """Return the chain as a dict of columnar lists.

        Downloads either the historical EOD chain (when `query.date` is set)
        or the current chain, then annotates it with the underlying quote.
        """
        # pylint: disable=import-outside-toplevel
        from openbb_tmx.models.equity_quote import TmxEquityQuoteFetcher
        from openbb_tmx.utils.helpers import download_eod_chains, get_current_options
        from pandas import DataFrame

        results: Dict = {}
        chains = DataFrame()
        if query.date is not None:
            # Historical end-of-day chain for the requested date.
            chains = await download_eod_chains(
                symbol=query.symbol, date=query.date, use_cache=query.use_cache
            )
        else:
            # Live chain.
            chains = await get_current_options(query.symbol, use_cache=query.use_cache)
        # Fetch the underlying's last price to attach to every contract row.
        underlying_quote = await TmxEquityQuoteFetcher.fetch_data(
            {"symbol": query.symbol}, credentials
        )
        underlying_price = underlying_quote[0].last_price  # type: ignore
        if underlying_price and not chains.empty:
            chains["underlying_price"] = underlying_price
            chains["underlying_symbol"] = query.symbol + ":CA"

        if not chains.empty:
            # orient="list" matches the columnar List[...] fields of
            # TmxOptionsChainsData.
            results = chains.to_dict(orient="list")

        return results

    @staticmethod
    def transform_data(
        query: TmxOptionsChainsQueryParams,
        data: Dict,
        **kwargs: Any,
    ) -> TmxOptionsChainsData:
        """Transform the data and validate the model."""
        return TmxOptionsChainsData.model_validate(data)
|
openbb_platform/providers/tmx/openbb_tmx/models/price_target_consensus.py
ADDED
|
@@ -0,0 +1,172 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX Stock Analysts Model."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from typing import Any, Dict, List, Optional
|
| 6 |
+
|
| 7 |
+
from openbb_core.app.model.abstract.error import OpenBBError
|
| 8 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 9 |
+
from openbb_core.provider.standard_models.price_target_consensus import (
|
| 10 |
+
PriceTargetConsensusData,
|
| 11 |
+
PriceTargetConsensusQueryParams,
|
| 12 |
+
)
|
| 13 |
+
from pydantic import Field, field_validator
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
class TmxPriceTargetConsensusQueryParams(PriceTargetConsensusQueryParams):
    """TMX Price Target Consensus Query."""

    # Comma-separated symbols are accepted and fanned out by the fetcher.
    __json_schema_extra__ = {"symbol": {"multiple_items_allowed": True}}

    @field_validator("symbol", mode="before", check_fields=False)
    @classmethod
    def check_symbol(cls, value):
        """Reject empty symbols — TMX cannot be queried without one."""
        if value:
            return value
        raise OpenBBError("Symbol is a required field for TMX.")
+
|
| 30 |
+
class TmxPriceTargetConsensusData(PriceTargetConsensusData):
    """TMX Price Target Consensus Data."""

    # Map standard-model field names to the keys produced by aextract_data.
    __alias_dict__ = {
        "target_consensus": "price_target",
        "target_high": "price_target_high",
        "target_low": "price_target_low",
        "target_upside": "price_target_upside",
    }

    target_upside: Optional[float] = Field(
        default=None,
        description="Percent of upside, as a normalized percent.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    total_analysts: Optional[int] = Field(
        default=None, description="Total number of analyst."
    )
    buy_ratings: Optional[int] = Field(
        default=None, description="Number of buy ratings."
    )
    sell_ratings: Optional[int] = Field(
        default=None, description="Number of sell ratings."
    )
    hold_ratings: Optional[int] = Field(
        default=None, description="Number of hold ratings."
    )
    consensus_action: Optional[str] = Field(
        default=None, description="Consensus action."
    )

    @field_validator("target_upside", mode="before", check_fields=False)
    @classmethod
    def normalize_percent(cls, v):
        """Return percents as normalized percentage points.

        FIX: only missing values map to None. The previous truthiness test
        (`if v`) coerced a legitimate 0 (a 0% upside) to None.
        """
        return None if v is None or v == "" else float(v) / 100

    @field_validator(
        "target_consensus",
        "target_high",
        "target_low",
        mode="before",
        check_fields=False,
    )
    @classmethod
    def round_targets(cls, v):
        """Return rounded prices to two decimals.

        FIX: same truthiness issue as above — a price of 0 is preserved.
        """
        return None if v is None or v == "" else round(float(v), 2)
|
| 80 |
+
class TmxPriceTargetConsensusFetcher(
    Fetcher[TmxPriceTargetConsensusQueryParams, List[TmxPriceTargetConsensusData]]
):
    """Transform the query, extract and transform the data from the TMX endpoints."""

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxPriceTargetConsensusQueryParams:
        """Transform the query."""
        return TmxPriceTargetConsensusQueryParams(**params)

    @staticmethod
    async def aextract_data(
        query: TmxPriceTargetConsensusQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> List[Dict]:
        """Return the raw data from the TMX endpoint, one request per symbol."""
        # pylint: disable=import-outside-toplevel
        import asyncio  # noqa
        import json  # noqa
        from openbb_tmx.utils import gql  # noqa
        from openbb_tmx.utils.helpers import get_data_from_gql, get_random_agent  # noqa

        symbols = query.symbol.split(",")  # type: ignore
        results: List[Dict] = []

        async def create_task(symbol, results):
            """Fetch analyst data for one symbol and append it to `results`."""
            # Normalize Canadian ticker notation to the bare TMX symbol.
            symbol = (
                symbol.upper()
                .replace("-", ".")
                .replace(".TO", "")
                .replace(".TSXV", "")
                .replace(".TSX", "")
            )

            payload = gql.get_company_analysts_payload.copy()
            # FIX: dict.copy() is shallow, so mutating the shared "variables"
            # sub-dict races across concurrently gathered tasks (every task
            # could end up querying the last symbol) and permanently mutates
            # the module-level payload template. Build a fresh per-task dict.
            payload["variables"] = {
                **payload.get("variables", {}),
                "symbol": symbol,
                "datatype": "equity",
            }

            data = {}
            url = "https://app-money.tmx.com/graphql"
            response = await get_data_from_gql(
                method="POST",
                url=url,
                data=json.dumps(payload),
                headers={
                    "authority": "app-money.tmx.com",
                    "referer": f"https://money.tmx.com/en/quote/{symbol}",
                    "locale": "en",
                    "Content-Type": "application/json",
                    "User-Agent": get_random_agent(),
                    "Accept": "*/*",
                },
                timeout=10,
            )
            r_data = (
                response["data"].get("analysts", None) if response.get("data") else None
            )
            if r_data:
                data.update(
                    {
                        "symbol": symbol,
                        "total_analysts": r_data["totalAnalysts"],
                        "consensus_action": r_data["consensusAnalysts"]["consensus"],
                        "buy_ratings": r_data["consensusAnalysts"]["buy"],
                        "sell_ratings": r_data["consensusAnalysts"]["sell"],
                        "hold_ratings": r_data["consensusAnalysts"]["hold"],
                        "price_target": r_data["priceTarget"]["priceTarget"],
                        "price_target_high": r_data["priceTarget"]["highPriceTarget"],
                        "price_target_low": r_data["priceTarget"]["lowPriceTarget"],
                        "price_target_upside": r_data["priceTarget"][
                            "priceTargetUpside"
                        ],
                    }
                )
                # Only record symbols with actual analyst coverage; an empty
                # dict would fail model validation downstream.
                results.append(data)
            return results

        tasks = [create_task(symbol, results) for symbol in symbols]

        await asyncio.gather(*tasks)

        return results

    @staticmethod
    def transform_data(
        query: TmxPriceTargetConsensusQueryParams,
        data: List[Dict],
        **kwargs: Any,
    ) -> List[TmxPriceTargetConsensusData]:
        """Return the transformed data."""
        return [TmxPriceTargetConsensusData.model_validate(d) for d in data]
|
openbb_platform/providers/tmx/openbb_tmx/models/treasury_prices.py
ADDED
|
@@ -0,0 +1,161 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX Treasury Prices Fetcher."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=unused-argument
|
| 4 |
+
|
| 5 |
+
from datetime import date as dateType
|
| 6 |
+
from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional
|
| 7 |
+
|
| 8 |
+
from openbb_core.provider.abstract.fetcher import Fetcher
|
| 9 |
+
from openbb_core.provider.standard_models.treasury_prices import (
|
| 10 |
+
TreasuryPricesData,
|
| 11 |
+
TreasuryPricesQueryParams,
|
| 12 |
+
)
|
| 13 |
+
from pydantic import Field, field_validator
|
| 14 |
+
|
| 15 |
+
if TYPE_CHECKING:
|
| 16 |
+
from pandas import DataFrame
|
| 17 |
+
|
| 18 |
+
|
| 19 |
+
class TmxTreasuryPricesQueryParams(TreasuryPricesQueryParams):
    """TMX Treasury Prices Query Params.

    Data will be made available by 5:00 EST on T+1

    Source: https://bondtradedata.iiroc.ca/#/
    """

    govt_type: Literal["federal", "provincial", "municipal"] = Field(
        default="federal",
        description="The level of government issuer.",
    )
    # NOTE(review): issue_date_min/issue_date_max are declared but never
    # applied as filters in the fetcher's transform_data — confirm intent.
    issue_date_min: Optional[dateType] = Field(
        default=None,
        description="Filter by the minimum original issue date.",
    )
    issue_date_max: Optional[dateType] = Field(
        default=None,
        description="Filter by the maximum original issue date.",
    )
    last_traded_min: Optional[dateType] = Field(
        default=None,
        description="Filter by the minimum last trade date.",
    )
    # Defaults to the most recent weekday when unset (see transform_query).
    maturity_date_min: Optional[dateType] = Field(
        default=None,
        description="Filter by the minimum maturity date.",
    )
    maturity_date_max: Optional[dateType] = Field(
        default=None,
        description="Filter by the maximum maturity date.",
    )
    use_cache: bool = Field(
        default=True,
        description="All bond data is sourced from a single JSON file that is updated daily."
        + " The file is cached for one day to eliminate downloading more than once."
        + " Caching will significantly speed up subsequent queries. To bypass, set to False.",
    )
+
|
| 59 |
+
class TmxTreasuryPricesData(TreasuryPricesData):
    """TMX Treasury Prices Data."""

    # Map standard-model field names to the source JSON's camelCase keys.
    __alias_dict__ = {
        "rate": "couponRate",
        "ytm": "lastYield",
        "last_price": "lastPrice",
        "highest_price": "highestPrice",
        "lowest_price": "lowestPrice",
        "total_trades": "totalTrades",
        "last_traded_date": "lastTradedDate",
        "maturity_date": "maturityDate",
        "issue_date": "originalIssueDate",
        "issuer_name": "issuer",
    }

    @field_validator(
        "ytm",
        "rate",
        mode="before",
        check_fields=False,
    )
    @classmethod
    def normalize_percent(cls, v):
        """Return percents as normalized percentage points.

        FIX: only missing values map to None. The previous truthiness test
        (`if v`) coerced a coupon rate of 0 — a legitimate value for
        zero-coupon issues — to None.
        """
        return None if v is None or v == "" else float(v) / 100
| 86 |
+
|
| 87 |
+
class TmxTreasuryPricesFetcher(
    Fetcher[
        TmxTreasuryPricesQueryParams,
        List[TmxTreasuryPricesData],
    ]
):
    """Tmx Bond Reference Fetcher."""

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> TmxTreasuryPricesQueryParams:
        """Transform query params, defaulting maturity_date_min to the most
        recent weekday when not supplied."""
        # pylint: disable=import-outside-toplevel
        from datetime import timedelta

        transformed_params = params.copy()
        yesterday = dateType.today() - timedelta(days=1)
        # Roll a weekend back to the preceding Friday (weekday 4).
        last_bd = (
            yesterday - timedelta(yesterday.weekday() - 4)
            if yesterday.weekday() > 4
            else yesterday
        )
        # FIX: check the value, not just key presence — the router passes
        # unset optional params through as an explicit None, which the old
        # `not in` test missed, leaving the default unapplied.
        if transformed_params.get("maturity_date_min") is None:
            transformed_params["maturity_date_min"] = last_bd
        return TmxTreasuryPricesQueryParams(**transformed_params)

    @staticmethod
    async def aextract_data(
        query: TmxTreasuryPricesQueryParams,
        credentials: Optional[Dict[str, str]],
        **kwargs: Any,
    ) -> "DataFrame":
        """Get the raw data containing all bond data."""
        # pylint: disable=import-outside-toplevel
        from openbb_tmx.utils.helpers import get_all_bonds

        bonds = await get_all_bonds(use_cache=query.use_cache)

        return bonds

    @staticmethod
    def transform_data(
        query: TmxTreasuryPricesQueryParams,
        data: "DataFrame",
        **kwargs: Any,
    ) -> List[TmxTreasuryPricesData]:
        """Filter the full bond table down to government issues and validate."""
        bonds = data.copy()
        results = []
        # Substrings matched case-insensitively against the issuer column.
        govt_type_dict = {
            "provincial": "prov",
            "federal": "government of canada",
            "municipal": "municipal",
        }
        # Referenced by name inside DataFrame.query via the @ prefix.
        govt_type = govt_type_dict[query.govt_type]  # noqa # pylint: disable=W0612
        data = bonds.query(
            "maturityDate >= @query.maturity_date_min.strftime('%Y-%m-%d')"
            + " & bondType == 'Govt'"
            + " & issuer.str.contains(@govt_type, case=False)"
        ).sort_values(by=["maturityDate"])
        data.issuer = data.loc[:, "issuer"].str.strip()
        if query.maturity_date_max:
            data = data.query(
                "maturityDate <= @query.maturity_date_max.strftime('%Y-%m-%d')"
            )
        if query.last_traded_min:
            data = data.query(
                "lastTradedDate >= @query.last_traded_min.strftime('%Y-%m-%d')"
            )

        if len(data) > 0:
            data = data.drop(columns=["bondType", "securityId", "secKey"])
            # Normalize both NaN and literal "N/A" markers to None for pydantic.
            data = data.fillna("N/A").replace("N/A", None)
            results = data.to_dict("records")

        return [TmxTreasuryPricesData.model_validate(d) for d in results]
|
openbb_platform/providers/tmx/openbb_tmx/py.typed
ADDED
|
File without changes
|
openbb_platform/providers/tmx/openbb_tmx/utils/__init__.py
ADDED
|
@@ -0,0 +1 @@
|
|
|
|
|
|
|
| 1 |
+
"""TMX Provider Utils."""
|
openbb_platform/providers/tmx/openbb_tmx/utils/gql.py
ADDED
|
@@ -0,0 +1,513 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""GraphQL query definitions."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=line-too-long
|
| 4 |
+
# ruff: noqa: E501
|
| 5 |
+
|
| 6 |
+
# GraphQL query for a full single-symbol quote/profile snapshot.
stock_info_query = """ query getQuoteBySymbol(
    $symbol: String,
    $locale: String
  ) {
  getQuoteBySymbol(symbol: $symbol, locale: $locale) {
    symbol
    name
    price
    priceChange
    percentChange
    exchangeName
    exShortName
    exchangeCode
    sector
    industry
    volume
    openPrice
    dayHigh
    dayLow
    MarketCap
    MarketCapAllClasses
    peRatio
    prevClose
    dividendFrequency
    dividendYield
    dividendAmount
    dividendCurrency
    beta
    eps
    exDividendDate
    shortDescription
    longDescription
    website
    email
    phoneNumber
    fullAddress
    employees
    shareOutStanding
    totalDebtToEquity
    totalSharesOutStanding
    sharesESCROW
    vwap
    dividendPayDate
    weeks52high
    weeks52low
    alpha
    averageVolume10D
    averageVolume30D
    averageVolume50D
    priceToBook
    priceToCashFlow
    returnOnEquity
    returnOnAssets
    day21MovingAvg
    day50MovingAvg
    day200MovingAvg
    dividend3Years
    dividend5Years
    datatype
    issueType
    qmdescription
  }
}
"""

# Request template; callers copy this and fill in variables["symbol"].
stock_info_payload = {
    "operationName": "getQuoteBySymbol",
    "variables": {"locale": "en"},
    "query": stock_info_query,
}
+
|
| 77 |
+
# GraphQL query for OHLCV time series at a given frequency/interval.
get_timeseries_query = """query getTimeSeriesData($symbol: String!, $freq: String, $interval: Int, $start: String, $end: String, $startDateTime: Int, $endDateTime: Int) {
  getTimeSeriesData(
    symbol: $symbol
    freq: $freq
    interval: $interval
    start: $start
    end: $end
    startDateTime: $startDateTime
    endDateTime: $endDateTime
  ) {
    dateTime
    open
    high
    low
    close
    volume
  }
}"""

# Request template; the variable values below are placeholders that callers
# overwrite per request.
get_timeseries_payload = {
    "operationName": "getTimeSeriesData",
    "variables": {
        "symbol": "BNS",
        "freq": "day",
        "interval": "",
        "start": "2013-09-30",
        "end": "2013-10-31",
        "startDateTime": "",
        "endDateTime": "",
    },
    "query": get_timeseries_query,
}
+
|
| 110 |
+
# GraphQL query for daily price history with optional split/dividend
# adjustment controls.
get_company_price_history_query = """query getCompanyPriceHistory($symbol: String!, $start: String, $end: String, $adjusted: Boolean, $adjustmentType: String, $unadjusted: Boolean, $limit: Int) {
  getCompanyPriceHistory(
    symbol: $symbol
    start: $start
    end: $end
    adjusted: $adjusted
    adjustmentType: $adjustmentType
    unadjusted: $unadjusted
    limit: $limit
  ) {
    datetime
    openPrice
    closePrice
    high
    low
    volume
    tradeValue
    numberOfTrade
    change
    changePercent
    vwap
  }
}"""

# Request template; the variable values below are placeholders that callers
# overwrite per request.
get_company_price_history_payload = {
    "operationName": "getCompanyPriceHistory",
    "variables": {
        "adjusted": True,
        "adjustmentType": "SO",
        "end": "2023-10-28",
        "start": "2023-10-01",
        "symbol": "BNS",
        "unadjusted": False,
    },
    "query": get_company_price_history_query,
}
| 146 |
+
|
| 147 |
+
# GraphQL query for the most recent trade tape (price/size/broker IDs).
get_company_most_recent_trades_query = """query getCompanyMostRecentTrades(
    $symbol: String!
    $limit: Int
  ) {
  trades: getCompanyMostRecentTrades(
    symbol: $symbol,
    limit: $limit
  ) {
    price
    volume
    datetime
    sellerId
    sellerName
    buyerId
    buyerName
    exchangeCode
  }
}
"""

# Request template; the variable values below are placeholders that callers
# overwrite per request.
get_company_most_recent_trades_payload = {
    "operationName": "getCompanyMostRecentTrades",
    "variables": {
        "symbol": "BNS",
        "limit": 51,
    },
    "query": get_company_most_recent_trades_query,
}
+
|
| 176 |
+
# GraphQL document combining two root fields in one request:
# paged news ("news") and upcoming calendar events ("events") for a symbol.
get_company_news_events_query = """query getNewsAndEvents(
  $symbol: String!,
  $page: Int!,
  $limit: Int!,
  $locale: String!
) {
  news: getNewsForSymbol(
    symbol: $symbol,
    page: $page,
    limit: $limit,
    locale: $locale
  ) {
    headline
    datetime
    source
    newsid
    summary
  }
  events: getUpComingEventsForSymbol(symbol: $symbol, locale: $locale) {
    title
    date
    status
    type
  }
}
"""

# Example request body for getNewsAndEvents.
get_company_news_events_payload = {
    "operationName": "getNewsAndEvents",
    "variables": {"symbol": "ART", "page": 1, "limit": 100, "locale": "en"},
    "query": get_company_news_events_query,
}
|
| 208 |
+
|
| 209 |
+
# GraphQL document: regulatory filings for a symbol over a date window,
# aliased as "filings"; urlToPdf links the filing document.
get_company_filings_query = """query getCompanyFilings($symbol: String!, $fromDate: String, $toDate: String, $limit: Int) {
  filings: getCompanyFilings(
    symbol: $symbol
    fromDate: $fromDate
    toDate: $toDate
    limit: $limit
  ) {
    size
    filingDate
    description
    name
    urlToPdf
  }
}"""

# Example request body for getCompanyFilings.
get_company_filings_payload = {
    "operationName": "getCompanyFilings",
    "variables": {
        "symbol": "AC",
        "fromDate": "2020-09-01",
        "toDate": "2023-09-20",
        "limit": 100,
    },
    "query": get_company_filings_query,
}
|
| 234 |
+
|
| 235 |
+
# GraphQL document: paged dividend history for a symbol; the envelope carries
# pagination state (pageNumber/hasNextPage) around the nested dividend rows.
historical_dividends_query = """query getDividendsForSymbol(
  $symbol: String!
  $page: Int,
  $batch: Int
) {
  dividends: getDividendsForSymbol(
    symbol: $symbol
    page: $page
    batch: $batch
  ) {
    pageNumber
    hasNextPage
    dividends
    {
      exDate
      amount
      currency
      payableDate
      declarationDate
      recordDate
    }
  }
}"""

# Example request body for getDividendsForSymbol (10 rows per page).
historical_dividends_payload = {
    "operationName": "getDividendsForSymbol",
    "variables": {
        "batch": 10,
        "page": 1,
        "symbol": "BNS",
    },
    "query": historical_dividends_query,
}
|
| 268 |
+
|
| 269 |
+
# GraphQL document: analyst coverage summary — price targets and
# buy/sell/hold consensus counts.
get_company_analysts_query = """query getCompanyAnalysts(
  $symbol: String!
  $dataType: String,
) {
  analysts: getCompanyAnalysts(
    datatype: $dataType,
    symbol: $symbol
  ) {
    totalAnalysts
    priceTarget
    {
      highPriceTarget
      lowPriceTarget
      priceTarget
      priceTargetUpside
    }
    consensusAnalysts
    {
      consensus
      buy
      sell
      hold
    }
  }
}"""

# Example request body for getCompanyAnalysts.
# NOTE(review): the query declares the variable as "$dataType" but this payload
# binds the key "datatype" — per the GraphQL spec the value would not bind to
# $dataType and the argument would be null; confirm and align the key.
get_company_analysts_payload = {
    "operationName": "getCompanyAnalysts",
    "variables": {
        "symbol": "BNS",
        "datatype": "equity",
    },
    "query": get_company_analysts_query,
}
|
| 303 |
+
|
| 304 |
+
# GraphQL document: the earnings calendar (estimates, actuals, surprise)
# for every reporting company on a given date.
get_earnings_date_query = """query getEnhancedEarningsForDate(
  $date: String!
) {
  getEnhancedEarningsForDate(
    date: $date
  ) {
    symbol
    companyName
    announceTime
    estimatedEps
    actualEps
    epsSurprisePercent
    epsSurpriseDollar
  }
}"""

# Example request body for getEnhancedEarningsForDate.
get_earnings_date_payload = {
    "operationName": "getEnhancedEarningsForDate",
    "variables": {
        "date": "2023-10-04",
    },
    "query": get_earnings_date_query,
}
|
| 327 |
+
|
| 328 |
+
# GraphQL document: descriptive overview text for an index symbol.
get_index_overview_query = """query getIndexBySymbol(
  $symbol: String!,
  $locale: String
) {
  getIndexBySymbol(
    symbol: $symbol,
    locale: $locale
  ) {
    name
    intro
    overview
  }
}"""

# Example request body for getIndexBySymbol; $locale is optional and omitted.
get_index_overview_payload = {
    "operationName": "getIndexBySymbol",
    "variables": {
        "symbol": "^TSX",
    },
    "query": get_index_overview_query,
}
|
| 349 |
+
|
| 350 |
+
# GraphQL document combining two root fields: index membership with weights
# ("constituents") and aggregate index statistics ("keyData").
get_index_constituents_query = """query getIndexConstituents(
  $symbol: String!
) {
  constituents: getIndexConstituents(
    symbol: $symbol
  ) {
    symbol
    quotedMarketValue
    longName
    shortName
    weight
    exShortName
    exchange
    exLongName
  }
  keyData: getIndexKeyData(
    symbol: $symbol
  ) {
    adjMarketCap
    avgConstituentMarketCap
    numConstituents
    top10HoldingsAdjMarketCap
    ytdPriceReturn
    prevDayPriceReturn
    prevMonthPriceReturn
    prevQuarterPriceReturn
    percentWeightLargestConstituent
    peRatio
    pbRatio
    priceToSales
    divYield
    pcfRatio
  }
}"""

# Example request body for getIndexConstituents.
get_index_constituents_payload = {
    "operationName": "getIndexConstituents",
    "variables": {
        "symbol": "^TSX",
    },
    "query": get_index_constituents_query,
}
|
| 392 |
+
|
| 393 |
+
# GraphQL document: a curated stock list (e.g. top volume) with a quote
# snapshot per member, aliased as "stockList".
get_stock_list_query = """query getStockListSymbolsWithQuote(
  $stockListId: String!
  $locale: String,
) {
  stockList: getStockListSymbolsWithQuote(
    stockListId: $stockListId,
    locale: $locale
  ) {
    stockListId
    name
    description
    longDescription
    metricTitle
    listItems
    {
      symbol
      longName
      rank
      metric
      price
      priceChange
      percentChange
      volume
    }
    totalPriceChange
    totalPercentChange
    createdAt
    updatedAt
  }
}"""

# Example request body for getStockListSymbolsWithQuote (top-volume list).
get_stock_list_payload = {
    "operationName": "getStockListSymbolsWithQuote",
    "variables": {"locale": "en", "stockListId": "TOP_VOLUME"},
    "query": get_stock_list_query,
}
|
| 429 |
+
|
| 430 |
+
# GraphQL document: insider trading activity for a symbol — per-period
# buy/sell detail plus an aggregated activity summary.
get_company_insiders_query = """query getCompanyInsidersActivities(
  $symbol: String
) {
  getCompanyInsidersActivities(
    symbol: $symbol
  ) {
    insiderActivities {
      periodkey
      buy {
        name
        trades
        shares
        sharesHeld
        tradeValue
      }
      sell {
        name
        trades
        shares
        sharesHeld
        tradeValue
      }
    }
    activitySummary {
      periodkey
      buyShares
      soldShares
      netActivity
      totalShares
    }
  }
}"""

# Example request body for getCompanyInsidersActivities.
get_company_insiders_payload = {
    "operationName": "getCompanyInsidersActivities",
    "variables": {
        "symbol": "CNQ",
    },
    "query": get_company_insiders_query,
}
|
| 470 |
+
|
| 471 |
+
|
| 472 |
+
# GraphQL document: batch quote snapshot for a list of symbols.
get_quote_for_symbols_query = """query getQuoteForSymbols($symbols: [String]) {
  getQuoteForSymbols(symbols: $symbols) {
    symbol
    longname
    price
    prevClose
    priceChange
    percentChange
    weeks52high
    weeks52low
  }
}"""

# Example request body for getQuoteForSymbols; ":US" suffixes denote
# US-listed symbols in TMX notation.
get_quote_for_symbols_payload = {
    "operationName": "getQuoteForSymbols",
    "variables": {
        "symbols": [
            "SRE:US",
            "BWVTF:US",
            "C.P.K:US",
            "BAC.PY:US",
            "BALTF:US",
            "BACRP:US",
        ],
    },
    "query": get_quote_for_symbols_query,
}
|
| 499 |
+
|
| 500 |
+
# GraphQL document: daily price history for an index symbol.
# BUG FIX: the original single-line string ended with a stray '"}' (leftover
# from a copied JSON request body), which left the GraphQL document with
# unbalanced braces and an embedded quote — an invalid query. It is also
# reformatted as a plain multi-line string for consistency with the other
# documents in this module; GraphQL ignores the whitespace differences.
get_index_price_history_query = """query getIndexPriceHistory($symbol: String!, $start: String, $end: String, $adjusted: Boolean, $adjustmentType: String, $unadjusted: Boolean, $limit: Int) {
  getIndexPriceHistory(
    symbol: $symbol
    start: $start
    end: $end
    adjusted: $adjusted
    adjustmentType: $adjustmentType
    unadjusted: $unadjusted
    limit: $limit
  ) {
    datetime
    openPrice
    closePrice
    high
    low
    volume
    change
    changePercent
    triv
  }
}"""

# Example request body for getIndexPriceHistory (^TSX, December 2023).
get_index_price_history_payload = {
    "operationName": "getIndexPriceHistory",
    "variables": {
        "symbol": "^TSX",
        "start": "2023-12-01",
        "end": "2023-12-31",
        "adjusted": True,
        "adjustmentType": "SO",
        "unadjusted": False,
    },
    "query": get_index_price_history_query,
}
|
openbb_platform/providers/tmx/openbb_tmx/utils/helpers.py
ADDED
|
@@ -0,0 +1,1147 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""TMX Helpers Module."""
|
| 2 |
+
|
| 3 |
+
# pylint: disable=too-many-lines,unused-argument,simplifiable-if-expression
|
| 4 |
+
|
| 5 |
+
from datetime import (
|
| 6 |
+
date as dateType,
|
| 7 |
+
datetime,
|
| 8 |
+
time,
|
| 9 |
+
timedelta,
|
| 10 |
+
)
|
| 11 |
+
from typing import TYPE_CHECKING, Any, Dict, List, Literal, Optional, Union
|
| 12 |
+
|
| 13 |
+
from openbb_core.app.model.abstract.error import OpenBBError
|
| 14 |
+
from openbb_tmx.utils import gql
|
| 15 |
+
|
| 16 |
+
if TYPE_CHECKING:
|
| 17 |
+
from aiohttp_client_cache import SQLiteBackend
|
| 18 |
+
from pandas import DataFrame
|
| 19 |
+
|
| 20 |
+
# Column map for ETFs.

# Maps field names from the TMX ETF screener payload (keys) to the
# snake_case names used by the provider models (values).
# NOTE(review): "distributionyeld" mirrors what appears to be the upstream
# API's own spelling — do not correct the key without confirming the
# actual payload field name.
COLUMNS_DICT = {
    "symbol": "symbol",
    "shortname": "short_name",
    "longname": "name",
    "fundfamily": "fund_family",
    "regions": "regions",
    "sectors": "sectors",
    "currency": "currency",
    "inceptiondate": "inception_date",
    "unitprice": "unit_price",
    "prevClose": "prev_close",
    "close": "close",
    "esg": "esg",
    "investmentstyle": "investment_style",
    "avgdailyvolume": "volume_avg_daily",
    "totalreturn1month": "return_1m",
    "totalreturn3month": "return_3m",
    "totalreturn1year": "return_1y",
    "totalreturn3year": "return_3y",
    "totalreturn5year": "return_5y",
    "totalreturnytd": "return_ytd",
    "totalreturnsinceinception": "return_from_inception",
    "distributionyeld": "distribution_yield",
    "dividendfrequency": "dividend_frequency",
    "pricetoearnings": "pe_ratio",
    "pricetobook": "pb_ratio",
    "assetclass": "asset_class_id",
    "prospectobjective": "investment_objectives",
    "beta1y": "beta_1y",
    "beta2y": "beta_2y",
    "beta3y": "beta_3y",
    "beta4y": "beta_4y",
    "beta5y": "beta_5y",
    "beta6y": "beta_6y",
    "beta7y": "beta_7y",
    "beta8y": "beta_8y",
    "beta9y": "beta_9y",
    "beta10y": "beta_10y",
    "beta11y": "beta_11y",
    "beta12y": "beta_12y",
    "beta13y": "beta_13y",
    "beta14y": "beta_14y",
    "beta15y": "beta_15y",
    "beta16y": "beta_16y",
    "beta17y": "beta_17y",
    "beta18y": "beta_18y",
    "beta19y": "beta_19y",
    "beta20y": "beta_20y",
    "avgvol30days": "volume_avg_30d",
    "aum": "aum",
    "top10holdings": "holdings_top10",
    "top10holdingsummary": "holdings_top10_summary",
    "totalreturn6month": "return_6m",
    "totalreturn10year": "return_10y",
    "managementfee": "management_fee",
    "altData": "additional_data",
}
|
| 78 |
+
|
| 79 |
+
# Additional Indices Supported By TMX for Snapshots Data.

# Map of Nasdaq Global Index Data Service (GIDS) symbols to display names.
# NOTE(review): several "^*SPY" entries repeat another pair's description
# (e.g. "^IVSPY" reuses the IBM-vs-SPY text) — likely upstream copy/paste;
# verify against the Nasdaq GIDS directory before relying on these names.
NASDAQ_GIDS = {
    "^ADRAI": "BLDRS Asia 50 ADR Index Fund",
    "^ADRDI": "BLDRS Developed Markets 100 ADR Index Fund",
    "^ADREI": "BLDRS Emerging Markets 50 ADR Index Fund",
    "^ASRN": "AlphaSector Rotation Index",
    "^ASRX": "AlphaSector Rotation Total Return Index",
    "^AVSPY": "NASDAQ OMX Alpha AAPL vs. SPY Index",
    "^BIXR": "BetterInvesting 100 Total Return Index",
    "^BIXX": "BetterInvesting 100 Index",
    "^BKX": "KBW Bank Index",
    "^BSCBK": "NASDAQ BulletShares USD Corporate Bond 2020 Index",
    "^BSCBL": "NASDAQ BulletShares USD Corporate Bond 2021 Index",
    "^BSCBM": "NASDAQ BulletShares USD Corporate Bond 2022 Index",
    "^BSCBN": "NASDAQ BulletShares USD Corporate Bond 2023 Index",
    "^BSCBO": "NASDAQ BulletShares USD Corporate Bond 2024 Index",
    "^BSCBP": "NASDAQ BulletShares USD Corporate Bond 2025 Index",
    "^BSJKK": "NASDAQ BulletShares USD High Yield Corporate Bond",
    "^BSJKL": "NASDAQ BulletShares USD High Yield Corporate Bond",
    "^BSJKM": "NASDAQ BulletShares USD High Yield Corporate Bond",
    "^BSJKN": "NASDAQ BulletShares USD High Yield Corporate Bond",
    "^BXN": "CBOE NASDAQ-100 BuyWrite Index",
    "^CELS": "NASDAQ Clean Edge Green Energy Index",
    "^CEXX": "NASDAQ Clean Edge Green Energy Total Return Index",
    "^CHXN": "NASDAQ China Index",
    "^CIX100": "Cryptoindex.com",
    "^CND": "NASDAQ Canada",
    "^COMPX": "NASDAQ Composite",
    "^CVXLF": "NASDAQ OMX Alpha C vs. XLF Index",
    "^DFX": "PHLX Defense Sector",
    "^DIVQ": "NASDAQ Dividend Achievers Index",
    "^DOT": "TheStreet.com Internet Sector",
    "^DTEC": "NASDAQ Dallas Regional Chamber Index",
    "^DVQT": "NASDAQ Dividend Achievers Total Return Index",
    "^DWAFIR": "Dorsey Wright Fixed Income Allocation Index",
    "^DWANQFF": "Dorsey Wright Focus Five Index",
    "^EMCLOUD": "BVP Nasdaq Emerging Cloud Index",
    "^EPX": "SIG Oil Exploration & Production Index",
    "^ABAQ": "ABA Community Bank NASDAQ Index",
    "^EVSPY": "NASDAQ OMX Alpha EEM vs. SPY Index",
    "^GESPY": "NASDAQ OMX Alpha GE vs. SPY Index",
    "^GOOSY": "NASDAQ OMX Alpha GOOG vs. SPY Index",
    "^GVSPY": "NASDAQ OMX Alpha GLD vs. SPY Index",
    "^HAUL": "Wilder NASDAQ OMX Global Energy Efficient Transport Index",
    "^HGX": "PHLX Housing Sector",
    "^IBMSY": "NASDAQ OMX Alpha IBM vs. SPY Index",
    "^ILTI": "NASDAQ OMX AeA Illinois Tech Index",
    "^INTSY": "NASDAQ OMX Alpha INTC vs. SPY Index",
    "^ISRQ": "NASDAQ Israel Index",
    "^ISRX": "NASDAQ Israel Total Return",
    "^IVSPY": "NASDAQ OMX Alpha IBM vs. SPY Index",
    "^IXBK": "NASDAQ Bank",
    "^IXCO": "NASDAQ Computer",
    "^IXF": "NASDAQ Financial",
    "^IXFN": "NASDAQ Other Finance",
    "^IXHC": "NASDAQ Health Care Index",
    "^IXID": "NASDAQ Industrial",
    "^IXIS": "NASDAQ Insurance",
    "^IXTC": "NASDAQ Telecommunications",
    "^IXTR": "NASDAQ Transportation",
    "^JVSPY": "NASDAQ OMX Alpha INTC vs. SPY Index",
    "^KRX": "KBW Regional Banking Index",
    "^LVSPY": "NASDAQ OMX Alpha GE vs. SPY Index",
    "^MFX": "KBW Mortgage Finance Index",
    "^MRKSY": "NASDAQ OMX Alpha MRK vs. SPY Index",
    "^MSH": "Morgan Stanley Technology index",
    "^MXZ": "PHLX Medical Device Sector",
    "^NBI": "NASDAQ Biotechnology",
    "^NBIE": "NASDAQ Biotechnology Equal Weighted Index",
    "^NBIJR": "Nasdaq Junior Biotechnology Index",
    "^NCI": "Nasdaq Crypto Index",
    "^NDX": "NASDAQ 100 Index",
    "^NDXE": "The NASDAQ-100 Equal Weighted Index",
    "^NDXT": "NASDAQ-100 Technology Sector Index",
    "^NDXX": "NASDAQ-100 Ex-Tech Sector Index",
    "^NEUX": "NASDAQ OMX Europe Index",
    "^NGX": "Nasdaq Next Generation 100 Index",
    "^NQ7HANDLTL": "Nasdaq 7HANDL Index",
    "^NQCICLER": "NASDAQ Commodity Crude Oil Index ER",
    "^NQCIGCER": "NASDAQ Commodity Gold Index ER",
    "^NQCIHGER": "NASDAQ Commodity HG Copper Index ER",
    "^NQCINGER": "NASDAQ Commodity Natural Gas Index ER",
    "^NQCISIER": "NASDAQ Commodity Silver Index ER",
    "^NQCYBRT": "Nasdaq CTA Cybersecurity Index",
    "^NQGM": "NASDAQ Global Market Composite",
    "^NQGS": "NASDAQ Global Select Market Composite",
    "^NQH2O": "Nasdaq Veles California Water Index",
    "^NQMGUSL": "Nasdaq US Mega Cap Select Leaders Index",
    "^NQVWLCCT": "Nasdaq Victory US 500 Large Vol Wt L/C TR",
    "^NQVWLCT": "Nasdaq Victory US 500 Large Vol Wt TR",
    "^NQVWLDCT": "Nasdaq Victory US 100 Large High Div Vol Wt L/C TR",
    "^NQX": "NASDAQ-100 Reduced Value Index",
    "^NVSPY": "NASDAQ OMX Alpha MRK vs. SPY Index",
    "^NXTQ": "NASDAQ Q-50",
    "^OMXB10": "OMX Baltic 10",
    "^OMXC20": "OMX Copenhagen 20",
    "^OMXH25": "OMX Helsinki 25",
    "^OMXN40": "OMX Nordic 40",
    "^OMXS30": "OMX Stockholm 30 Index",
    "^ONEQI": "Fidelity Nasdaq Composite Index Tracking Stock",
    "^OSX": "PHLX Oil Service Sector",
    "^PRFEI": "PowerShares FTSE RAFI Energy Sector Portfolio",
    "^PRFFI": "PowerShares FTSE RAFI Financials Sector Portfolio",
    "^PRFGI": "PowerShares FTSE RAFI Consumer Goods Sector Portfolio",
    "^PRFHI": "PowerShares FTSE RAFI Health Care Sector Portfolio",
    "^PRFMI": "PowerShares FTSE RAFI Basic Materials Sector Portfolio",
    "^PRFNI": "PowerShares FTSE RAFI Industrials Sector Portfolio",
    "^PRFQI": "PowerShares FTSE RAFI Telecom & Tech Sector Portfolio",
    "^PRFSI": "PowerShares FTSE RAFI Consumer Goods Sector Portfolio",
    "^PRFUI": "PowerShares FTSE RAFI Utilities Sector Portfolio",
    "^PRFZI": "PowerShares FTSE RAFI US 1500 Small-Mid Portfolio",
    "^QAGR": "NASDAQ OMX Global Agriculture Index",
    "^QCLNI": "First Trust NASDAQ Clean Edge U.S. Liquid Series",
    "^QCOL": "NASDAQ OMX Global Coal Index",
    "^QGLD": "NASDAQ OMX Global Gold & Precious Metals Index",
    "^QGRI": "NASDAQ OMX Government Relief Index",
    "^QIRL": "NASDAQ OMX Ireland Index",
    "^QIV": "NASDAQ 100 After Hours Indicator",
    "^QMEA": "NASDAQ OMX Middle East North Africa Index",
    "^QMI": "NASDAQ 100 Pre Market Indicator",
    "^QNET": "NASDAQ Internet Index",
    "^QOMX": "NASDAQ OMX 100 Index",
    "^QQEWI": "First Trust NASDAQ 100 Equal Weighted Index Fund",
    "^NOCO": "NASDAQ OMX Carbon Excess Return Index",
    "^QQXTI": "First Trust NASDAQ 100 Ex-Technology Sector",
    "^QSTL": "NASDAQ OMX Global Steel Index",
    "^QTECI": "First Trust NASDAQ 100 Technology Sector",
    "^QWND": "NASDAQ OMX Clean Edge Global Wind Energy Index",
    "^RCMP": "NASDAQ Capital Market Composite Index",
    "^RXS": "PHLX Drug Sector",
    "^SHX": "PHLX Marine Shipping Sector",
    "^SOX": "PHLX Semiconductor Sector",
    "^SRVRSCPR": "Kelly Data Center and Tech Infrastructure Index",
    "^SVO": "SIG Energy MLP Index",
    "^TRAN": "Dow Transportation",
    "^TVSPY": "NASDAQ OMX Alpha TLT vs. SPY Index",
    "^UTY": "PHLX Utility Sector",
    "^UVSPY": "NASDAQ OMX Alpha GOOG vs. SPY Index",
    "^VOLNDX": "Volatility NASDAQ - 100",
    "^VOLQ": "Nasdaq-100 Volatility Index",
    "^WMTSY": "NASDAQ OMX Alpha WMT vs. SPY Index",
    "^WVSPY": "NASDAQ OMX Alpha WMT vs. SPY Index",
    "^XAU": "PHLX Gold/Silver Sector",
    "^XCM": "PHLX Chemicals Sector",
    "^XEX": "PHLX Europe Sector",
    "^XND": "Nasdaq-100 Micro Index",
}
|
| 227 |
+
|
| 228 |
+
|
| 229 |
+
def get_random_agent() -> str:
    """Return a randomly selected User-Agent string.

    Rotates among up to 100 candidate agents so repeated requests do not
    present an identical fingerprint.
    """
    # pylint: disable=import-outside-toplevel
    from random_user_agent.user_agent import UserAgent

    return UserAgent(limit=100).get_random_user_agent()
|
| 237 |
+
|
| 238 |
+
|
| 239 |
+
def get_companies_backend():
    """Get the SQLiteBackend for the TMX companies.

    Only used for obtaining the directory of all valid company tickers;
    cached responses expire after two days.
    """
    # pylint: disable=import-outside-toplevel
    from aiohttp_client_cache import SQLiteBackend  # noqa
    from openbb_core.app.utils import get_user_cache_directory  # noqa

    cache_path = f"{get_user_cache_directory()}/http/tmx_companies"
    return SQLiteBackend(cache_path, expire_after=timedelta(days=2))
|
| 252 |
+
|
| 253 |
+
|
| 254 |
+
def get_indices_backend():
    """Get the SQLiteBackend for the TMX indices.

    Only used for obtaining the directory of all valid indices;
    cached responses expire after one day.
    """
    # pylint: disable=import-outside-toplevel
    from aiohttp_client_cache import SQLiteBackend  # noqa
    from openbb_core.app.utils import get_user_cache_directory  # noqa

    cache_path = f"{get_user_cache_directory()}/http/tmx_indices"
    return SQLiteBackend(cache_path, expire_after=timedelta(days=1))
|
| 266 |
+
|
| 267 |
+
|
| 268 |
+
async def response_callback(response, _: Any):
    """Decode an HTTP client response by its Content-Type header.

    Returns parsed JSON for "application/json", a string for any "text"
    type, and raw bytes for everything else.
    """
    mime = response.headers.get("Content-Type", "")
    if "application/json" in mime:
        return await response.json()
    if "text" in mime:
        return await response.text()
    return await response.read()
|
| 276 |
+
|
| 277 |
+
|
| 278 |
+
async def get_data_from_url(
    url: str,
    use_cache: bool = True,
    backend: Optional["SQLiteBackend"] = None,
    **kwargs: Any,
) -> Any:
    """Make an asynchronous HTTP request to a static file.

    Parameters
    ----------
    url : str
        The URL to request.
    use_cache : bool
        When True, route the request through the aiohttp-client-cache session.
    backend : Optional[SQLiteBackend]
        Cache backend used when `use_cache` is True.
    **kwargs : Any
        Extra keyword arguments forwarded to the underlying request.

    Returns
    -------
    Any
        Parsed JSON, text, or raw bytes depending on the response Content-Type.
    """
    # pylint: disable=import-outside-toplevel
    from aiohttp_client_cache.session import CachedSession
    from openbb_core.provider.utils.helpers import amake_request

    data: Any = None
    if use_cache is True:
        async with CachedSession(cache=backend) as cached_session:
            try:
                response = await cached_session.get(url, **kwargs)
                data = await response_callback(response, None)
            finally:
                await cached_session.close()
    else:
        # BUG FIX: kwargs (e.g. headers) were previously dropped on the
        # uncached path; forward them so both paths behave the same.
        data = await amake_request(
            url, response_callback=response_callback, timeout=20, **kwargs
        )

    return data
|
| 301 |
+
|
| 302 |
+
|
| 303 |
+
async def get_data_from_gql(url: str, headers, data, **kwargs: Any) -> Any:
    """Make an asynchronous GraphQL request.

    POSTs the serialized GraphQL payload in `data` with the supplied headers
    and returns the decoded response body.
    """
    # pylint: disable=import-outside-toplevel
    from openbb_core.provider.utils.helpers import amake_request

    return await amake_request(
        url=url,
        method="POST",
        response_callback=response_callback,
        headers=headers,
        data=data,
        timeout=30,
    )
|
| 318 |
+
|
| 319 |
+
|
| 320 |
+
def replace_values_in_list_of_dicts(data):
    """Replace "NA" and "-" with None in a list of dictionaries.

    Mutates the records in place (including nested dictionaries and
    dictionaries inside list values) and returns the same list.
    """
    placeholders = ("NA", "-")
    for record in data:
        for key, value in record.items():
            if isinstance(value, dict):
                # Recurse into a nested dictionary.
                replace_values_in_list_of_dicts([value])
            elif isinstance(value, list):
                for idx, item in enumerate(value):
                    if isinstance(item, dict):
                        # Recurse into a nested dictionary inside a list.
                        replace_values_in_list_of_dicts([item])
                    elif item in placeholders:
                        value[idx] = None
            elif value in placeholders:
                record[key] = None
    return data
|
| 337 |
+
|
| 338 |
+
|
| 339 |
+
def check_weekday(date) -> str:
    """Check if the input date is a weekday, and if not, returns the next weekday.

    Parameters
    ----------
    date: str
        The date to check in YYYY-MM-DD format.

    Returns
    -------
    str
        Date in YYYY-MM-DD format. If the date is a weekend, returns the date of the next weekday.
    """
    # pylint: disable=import-outside-toplevel
    from pandas import to_datetime
    from pandas.tseries.holiday import next_workday

    parsed = to_datetime(date)
    # Monday-Friday are weekdays 0-4; return the input unchanged for those.
    if parsed.weekday() <= 4:
        return date
    return next_workday(parsed).strftime("%Y-%m-%d")
|
| 359 |
+
|
| 360 |
+
|
| 361 |
+
async def get_all_etfs(use_cache: bool = True) -> List[Dict]:
    """Get a summary of the TMX ETF universe.

    Parameters
    ----------
    use_cache : bool
        Use the 4-hour SQLite HTTP cache for the ETF JSON file. Default is True.

    Returns
    -------
    List[Dict]
        One record per TMX-listed ETF.

    Raises
    ------
    OpenBBError
        If the request returns no content.
    """
    # pylint: disable=import-outside-toplevel
    from aiohttp_client_cache import SQLiteBackend  # noqa
    from openbb_core.app.utils import get_user_cache_directory  # noqa
    from pandas import DataFrame  # noqa

    # Only used for obtaining the all ETFs JSON file.
    tmx_etfs_backend = SQLiteBackend(
        f"{get_user_cache_directory()}/http/tmx_etfs", expire_after=timedelta(hours=4)
    )

    url = "https://dgr53wu9i7rmp.cloudfront.net/etfs/etfs.json"

    response = await get_data_from_url(
        url, use_cache=use_cache, backend=tmx_etfs_backend
    )

    # `not response` already covers the None case; the original's extra
    # `response is None` test was redundant.
    if not response:
        raise OpenBBError("There was a problem with the request. Could not get ETFs.")

    response = replace_values_in_list_of_dicts(response)

    etfs = DataFrame(response).rename(columns=COLUMNS_DICT)

    # Drop the noisy beta columns; errors="ignore" keeps this from raising
    # a KeyError if the upstream payload ever stops including one of them.
    etfs = etfs.drop(
        columns=[
            "beta_2y",
            "beta_4y",
            "beta_6y",
            "beta_7y",
            "beta_8y",
            "beta_9y",
            "beta_11y",
            "beta_12y",
            "beta_13y",
            "beta_14y",
            "beta_16y",
            "beta_17y",
            "beta_18y",
            "beta_19y",
        ],
        errors="ignore",
    )

    # Flatten selected fields out of the nested "additional_data" mapping.
    for i in etfs.index:
        additional = etfs.loc[i, "additional_data"]  # type: ignore
        # Robustness: guard against a missing/None additional_data entry,
        # which previously raised AttributeError on .get().
        if not isinstance(additional, dict):
            continue
        etfs.loc[i, "fund_family"] = additional.get("fundfamilyen", None)
        etfs.loc[i, "website"] = additional.get("websitefactsheeten", None)
        etfs.loc[i, "mer"] = additional.get("mer", None)
    # Normalize all missing values to None for clean serialization.
    etfs = etfs.fillna("N/A").replace("N/A", None)

    return etfs.to_dict(orient="records")
|
| 418 |
+
|
| 419 |
+
|
| 420 |
+
async def get_tmx_tickers(
    exchange: Literal["tsx", "tsxv"] = "tsx", use_cache: bool = True
) -> Dict:
    """Get a dictionary of either TSX or TSX-V symbols and names.

    Parameters
    ----------
    exchange : Literal["tsx", "tsxv"]
        Which directory to fetch. Default is "tsx".
    use_cache : bool
        Use the cached companies backend for the request. Default is True.

    Returns
    -------
    Dict
        Mapping of ticker symbol -> company name, sorted by symbol.

    Raises
    ------
    OpenBBError
        If the response is empty or missing the "results" key.
    """
    # pylint: disable=import-outside-toplevel
    from pandas import DataFrame

    tsx_json_url = "https://www.tsx.com/json/company-directory/search"
    url = f"{tsx_json_url}/{exchange}/*"
    response = await get_data_from_url(
        url, use_cache=use_cache, backend=get_companies_backend()
    )
    # Robustness: fail with a clear message instead of an opaque
    # TypeError/KeyError when the payload is empty or malformed.
    if not response or "results" not in response:
        raise OpenBBError(
            f"There was a problem fetching the company directory for: {exchange}"
        )
    data = (
        DataFrame.from_records(response["results"])[["symbol", "name"]]
        .set_index("symbol")
        .sort_index()
    )
    return data.to_dict()["name"]
|
| 439 |
+
|
| 440 |
+
|
| 441 |
+
async def get_all_tmx_companies(use_cache: bool = True) -> Dict:
    """Merge TSX and TSX-V listings into a single dictionary.

    TSX-V entries are loaded first so that any symbol listed on both
    exchanges keeps its TSX name.
    """
    tsx = await get_tmx_tickers("tsx", use_cache=use_cache)
    tsxv = await get_tmx_tickers("tsxv", use_cache=use_cache)
    combined: Dict = {}
    combined.update(tsxv)
    combined.update(tsx)
    return combined
|
| 449 |
+
|
| 450 |
+
|
| 451 |
+
async def get_all_options_tickers(use_cache: bool = True) -> "DataFrame":
    """Return a DataFrame with all valid ticker symbols.

    Scrapes the Montreal Exchange (m-x.ca) options listing page and
    returns one row per option root, indexed by "option_symbol".

    Parameters
    ----------
    use_cache : bool
        Use the cached companies backend for the request. Default is True.

    Returns
    -------
    DataFrame
        Listings indexed by "option_symbol", with snake_case columns.

    Raises
    ------
    OpenBBError
        If the request returns no content.
    """
    # pylint: disable=import-outside-toplevel
    from io import StringIO  # noqa
    from pandas import concat, read_html  # noqa
    from openbb_core.provider.utils.helpers import to_snake_case  # noqa

    url = "https://www.m-x.ca/en/trading/data/options-list"

    r = await get_data_from_url(
        url, use_cache=use_cache, backend=get_companies_backend()
    )

    if r is None or r == []:
        raise OpenBBError("Error with the request")  # mypy: ignore

    # The page contains several HTML tables; stack them into one frame.
    options_listings = read_html(StringIO(r))
    listings = concat(options_listings)
    listings = listings.set_index("Option Symbol").drop_duplicates().sort_index()
    # The final row is excluded here — presumably a footer/total line from
    # the scraped table. TODO confirm against the live page.
    symbols = listings[:-1]
    symbols = symbols.fillna(value="")
    # Normalize underlying symbols: " u" suffixes become ".UN" unit
    # tickers, and the "––" placeholder becomes an empty string.
    symbols["Underlying Symbol"] = (
        symbols["Underlying Symbol"].str.replace(" u", ".UN").str.replace("––", "")
    )
    symbols = symbols.reset_index()
    # Snake-case the headers and strip the "name_of_" prefix produced by
    # columns like "Name of Underlying Instrument".
    symbols.columns = [
        to_snake_case(col).replace("name_of_", "") for col in symbols.columns
    ]

    return symbols.set_index("option_symbol")
|
| 481 |
+
|
| 482 |
+
|
| 483 |
+
async def get_current_options(symbol: str, use_cache: bool = True) -> "DataFrame":
    """Get the current quotes for the complete options chain.

    Scrapes the m-x.ca quotes page for the symbol, assembles calls and
    puts into a single frame, and builds a standardized OCC-style
    contract symbol for each row.

    Parameters
    ----------
    symbol : str
        The underlying ticker; exchange suffixes (.TO, .TSX, -) are stripped.
    use_cache : bool
        Cache flag for the symbol-directory lookup only; the quotes page
        itself is always fetched uncached. Default is True.

    Returns
    -------
    DataFrame
        One row per contract with snake_case columns, including dte and
        contract_symbol.

    Raises
    ------
    OpenBBError
        If the symbol is not a valid options listing.
    """
    # pylint: disable=import-outside-toplevel
    from io import StringIO  # noqa
    from pandas import DataFrame, DatetimeIndex, concat, read_html, to_datetime  # noqa
    from openbb_core.provider.utils.helpers import to_snake_case  # noqa

    SYMBOLS = await get_all_options_tickers(use_cache=use_cache)
    data = DataFrame()
    symbol = symbol.upper()

    # Remove exchange identifiers from the symbol.
    symbol = symbol.upper().replace("-", ".").replace(".TO", "").replace(".TSX", "")
    # Underlying symbol may have a different ticker symbol than the ticker used to lookup options.
    if len(SYMBOLS[SYMBOLS["underlying_symbol"].str.contains(symbol)]) == 1:
        symbol = SYMBOLS[SYMBOLS["underlying_symbol"] == symbol].index.values[0]
    # Check if the symbol has options trading.
    if symbol not in SYMBOLS.index and not SYMBOLS.empty:
        raise OpenBBError(
            f"The symbol, {symbol}, is not a valid listing or does not trade options."
        )

    QUOTES_URL = f"https://www.m-x.ca/en/trading/data/quotes?symbol={symbol}"

    # Target column names for both the calls and puts sub-tables.
    cols = [
        "expiration",
        "strike",
        "bid",
        "ask",
        "lastTradePrice",
        "change",
        "openInterest",
        "volume",
        "optionType",
    ]

    # Quotes are live; never serve them from cache.
    r = await get_data_from_url(QUOTES_URL, use_cache=False)
    data = read_html(StringIO(r))[0]
    # The last row of the scraped table is excluded — presumably a
    # footer/total line. TODO confirm against the live page.
    data = data.iloc[:-1]

    expirations = (
        data["Unnamed: 0_level_0"]["Expiry date"].astype(str).rename("expiration")
    )

    # NOTE: str.strip removes any of the characters "(Weekly)" from both
    # ends of each value — used here to clear the weekly-expiry marker.
    expirations = expirations.str.strip("(Weekly)")

    strikes = (
        data["Unnamed: 7_level_0"]
        .dropna()
        .sort_values("Strike")  # type: ignore
        .rename(columns={"Strike": "strike"})
    )

    # Calls and puts share the expiration/strike columns; build each side
    # separately, then stack them.
    calls = concat([expirations, strikes, data["Calls"]], axis=1)
    calls["expiration"] = DatetimeIndex(calls["expiration"]).astype(str)
    calls["optionType"] = "call"
    calls.columns = cols
    calls = calls.set_index(["expiration", "strike", "optionType"])

    puts = concat([expirations, strikes, data["Puts"]], axis=1)
    puts["expiration"] = DatetimeIndex(puts["expiration"]).astype(str)
    puts["optionType"] = "put"
    puts.columns = cols
    puts = puts.set_index(["expiration", "strike", "optionType"])

    chains = concat([calls, puts])
    # Coerce numeric columns scraped as strings.
    chains["openInterest"] = chains["openInterest"].astype("int64")
    chains["volume"] = chains["volume"].astype("int64")
    chains["change"] = chains["change"].astype(float)
    chains["lastTradePrice"] = chains["lastTradePrice"].astype(float)
    chains["bid"] = chains["bid"].astype(float)
    chains["ask"] = chains["ask"].astype(float)
    chains = chains.sort_index()
    chains = chains.reset_index()
    # Days-to-expiration, counting the expiry day itself (+1).
    now = datetime.now()
    temp = DatetimeIndex(chains.expiration)
    temp_ = (temp - now).days + 1  # type: ignore
    chains["dte"] = temp_

    # Create the standardized contract symbol.
    # Strike is encoded as a zero-padded 8-character field: 5 digits of
    # whole dollars and 3 digits of decimals.
    _strikes = chains["strike"]
    strikes = []
    for _strike in _strikes:
        _strike = str(_strike).split(".")
        front = "0" * (5 - len(_strike[0]))
        back = "0" * (3 - len(_strike[1]))
        strike = f"{front}{_strike[0]}{_strike[1]}{back}"
        strikes.append(str(strike))

    chains["strikes"] = strikes
    # Root padded to 6 chars + YYMMDD expiry + C/P + encoded strike.
    chains["contract_symbol"] = (
        symbol
        + " " * (6 - len(symbol))
        + to_datetime(chains["expiration"]).dt.strftime("%y%m%d")
        + (chains["optionType"].replace("call", "C").replace("put", "P"))
        + chains["strikes"]
    )
    chains.drop(columns=["strikes"], inplace=True)

    chains.columns = [to_snake_case(c) for c in chains.columns.to_list()]

    return chains
|
| 585 |
+
|
| 586 |
+
|
| 587 |
+
async def download_eod_chains(
    symbol: str, date: Optional[dateType] = None, use_cache: bool = False
) -> "DataFrame":
    """Download EOD chains data for a given symbol and date.

    Fetches the Montreal Exchange historical-options CSV export, cleans
    it, and returns a frame with snake_case columns including dte and
    underlying_price.

    Parameters
    ----------
    symbol : str
        The underlying ticker; exchange suffixes (.TO, .TSX, -) are stripped.
    date : Optional[date]
        Target session date. If None, the exchange's most recent EOD file
        is downloaded. Non-session dates are rolled forward (see below).
    use_cache : bool
        Cache flag for the download. Default is False.

    Returns
    -------
    DataFrame
        One row per contract for the session.

    Raises
    ------
    OpenBBError
        If the symbol is invalid, the request fails, or no rows come back.
    """
    # pylint: disable=import-outside-toplevel
    from io import StringIO  # noqa
    import exchange_calendars as xcals  # noqa
    from pandas import DatetimeIndex, Timedelta, read_csv, to_datetime  # noqa
    from openbb_core.provider.utils.helpers import to_snake_case  # noqa

    symbol = symbol.upper()
    SYMBOLS = await get_all_options_tickers(use_cache=False)
    # Remove exchange identifiers from the symbol.
    symbol = symbol.upper().replace("-", ".").replace(".TO", "").replace(".TSX", "")

    # Underlying symbol may have a different ticker symbol than the ticker used to lookup options.
    if len(SYMBOLS[SYMBOLS["underlying_symbol"].str.contains(symbol)]) == 1:
        symbol = SYMBOLS[SYMBOLS["underlying_symbol"] == symbol].index.values[0]
    # Check if the symbol has options trading.
    if symbol not in SYMBOLS.index and not SYMBOLS.empty:
        raise OpenBBError(
            f"The symbol, {symbol}, is not a valid listing or does not trade options."
        )

    BASE_URL = "https://www.m-x.ca/en/trading/data/historical?symbol="

    # Toronto Stock Exchange trading calendar, used to validate sessions.
    cal = xcals.get_calendar("XTSE")

    if date is None:
        EOD_URL = BASE_URL + f"{symbol}" "&dnld=1#quotes"
    else:
        # Roll weekends forward, then roll past non-session days
        # (holidays); the roll is attempted twice to cover a holiday that
        # lands adjacent to a weekend.
        date = check_weekday(date)  # type: ignore
        if cal.is_session(date) is False:  # type: ignore
            date = (to_datetime(date) + timedelta(days=1)).strftime("%Y-%m-%d")  # type: ignore
        date = check_weekday(date)  # type: ignore
        if cal.is_session(date=date) is False:  # type: ignore
            date = (to_datetime(date) + timedelta(days=1)).strftime("%Y-%m-%d")  # type: ignore

        EOD_URL = (
            BASE_URL + f"{symbol}" "&from=" f"{date}" "&to=" f"{date}" "&dnld=1#quotes"
        )

    r = await get_data_from_url(EOD_URL, use_cache=use_cache)  # type: ignore

    if r is None:
        raise OpenBBError("Error with the request, no data was returned.")

    data = read_csv(StringIO(r))
    if data.empty:
        raise OpenBBError(
            f"No data found for, {symbol}, on, {date}."
            "The symbol may not have been listed, or traded options, before that date."
        )

    data["contractSymbol"] = data["Symbol"]

    # Upstream encodes calls as 0 and puts as 1.
    data["optionType"] = data["Call/Put"].replace(0, "call").replace(1, "put")

    # Drop the raw identifier columns that have been re-derived above.
    data = data.drop(
        columns=[
            "Symbol",
            "Class Symbol",
            "Root Symbol",
            "Underlying Symbol",
            "Ins. Type",
            "Call/Put",
        ]
    )

    # Positional rename of the remaining CSV columns — depends on the
    # upstream column order staying stable.
    cols = [
        "eod_date",
        "strike",
        "expiration",
        "closeBid",
        "closeAsk",
        "closeBidSize",
        "closeAskSize",
        "lastTradePrice",
        "volume",
        "prevClose",
        "change",
        "open",
        "high",
        "low",
        "totalValue",
        "transactions",
        "settlementPrice",
        "openInterest",
        "impliedVolatility",
        "contractSymbol",
        "optionType",
    ]

    data.columns = cols
    data["underlying_symbol"] = symbol + ":CA"
    data["expiration"] = to_datetime(data["expiration"], format="%Y-%m-%d")
    data["eod_date"] = to_datetime(data["eod_date"], format="%Y-%m-%d")
    # Upstream IV is in percent; convert to a decimal fraction.
    data["impliedVolatility"] = 0.01 * data["impliedVolatility"]

    # Days-to-expiration relative to the EOD session date.
    date_ = data["eod_date"]
    temp = DatetimeIndex(data.expiration)
    temp_ = temp - date_  # type: ignore
    data["dte"] = [Timedelta(_temp_).days for _temp_ in temp_]
    data = data.set_index(["expiration", "strike", "optionType"]).sort_index()
    data["eod_date"] = data["eod_date"].astype(str)
    # The last row after sorting is used as the underlying quote —
    # presumably the strike-0 underlying row the exchange appends; that
    # row is then filtered out below. TODO confirm against the CSV export.
    underlying_price = data.iloc[-1]["lastTradePrice"]
    data["underlyingPrice"] = underlying_price
    data = data.reset_index()
    data = data[data["strike"] != 0]
    data["expiration"] = to_datetime(data["expiration"]).dt.strftime("%Y-%m-%d")

    data.columns = [to_snake_case(c) for c in data.columns.to_list()]

    return data
|
| 701 |
+
|
| 702 |
+
|
| 703 |
+
async def get_company_filings(
    symbol: str,
    start_date: Optional[str] = (datetime.now() - timedelta(days=30)).strftime(
        "%Y-%m-%d"
    ),
    end_date: Optional[str] = datetime.now().date().strftime("%Y-%m-%d"),
    limit: int = 50,
) -> List[Dict]:
    """Get company filings.

    Parameters
    ----------
    symbol : str
        The ticker; exchange suffixes (.TO, .TSX, -) are stripped.
    start_date : Optional[str]
        Earliest filing date, YYYY-MM-DD. NOTE: the default is evaluated
        once at import time (30 days before process start).
    end_date : Optional[str]
        Latest filing date, YYYY-MM-DD. Same import-time caveat.
    limit : int
        Maximum number of filings to return. Default is 50.

    Returns
    -------
    List[Dict]
        The filings records; an empty list when none are found.

    Raises
    ------
    OpenBBError
        If the GraphQL request fails.
    """
    # pylint: disable=import-outside-toplevel
    import json  # noqa
    from copy import deepcopy  # noqa

    user_agent = get_random_agent()
    symbol = symbol.upper().replace("-", ".").replace(".TO", "").replace(".TSX", "")

    # Deep-copy the module-level template so concurrent callers do not
    # mutate each other's (shared) "variables" dict.
    payload = deepcopy(gql.get_company_filings_payload)
    payload["variables"]["symbol"] = symbol
    payload["variables"]["fromDate"] = start_date
    payload["variables"]["toDate"] = end_date
    payload["variables"]["limit"] = limit
    url = "https://app-money.tmx.com/graphql"
    try:
        r = await get_data_from_gql(
            url=url,
            data=json.dumps(payload),
            headers={
                "Accept": "*/*",
                "Accept-Encoding": "gzip, deflate, br",
                "Accept-Language": "en-CA,en-US;q=0.7,en;q=0.3",
                "Connection": "keep-alive",
                "Content-Type": "application/json",
                "Host": "app-money.tmx.com",
                "Origin": "https://money.tmx.com",
                "Referer": "https://money.tmx.com/",
                "locale": "en",
                "Sec-Fetch-Dest": "empty",
                "Sec-Fetch-Mode": "cors",
                "Sec-Fetch-Site": "same-site",
                "TE": "trailers",
                "User-Agent": user_agent,
            },
        )
    except Exception as _e:
        raise OpenBBError(_e) from _e
    # Bug fix: the original None-check (`results = []`) was immediately
    # overwritten by `r.get("data").get("filings")`, so a missing payload
    # could return None — or raise AttributeError when "data" was absent.
    filings = (r.get("data") or {}).get("filings") if isinstance(r, dict) else None
    return filings if filings is not None else []
|
| 753 |
+
|
| 754 |
+
|
| 755 |
+
async def get_daily_price_history(
    symbol: str,
    start_date: Optional[Union[str, dateType]] = None,
    end_date: Optional[Union[str, dateType]] = None,
    adjustment: Literal[
        "splits_only", "unadjusted", "splits_and_dividends"
    ] = "splits_only",
):
    """Get historical daily price data.

    The date range is split into 4-week chunks which are fetched
    concurrently from the TMX GraphQL endpoint.

    Parameters
    ----------
    symbol : str
        The ticker; exchange suffixes (.TO, .TSX, -) are stripped.
    start_date : Optional[Union[str, date]]
        Start of the range; defaults to 52 weeks ago.
    end_date : Optional[Union[str, date]]
        End of the range; defaults to now.
    adjustment : Literal["splits_only", "unadjusted", "splits_and_dividends"]
        Price adjustment mode. Default is "splits_only".

    Returns
    -------
    List[Dict]
        Daily records sorted ascending by "datetime"; rows with a null
        openPrice (non-trading sessions) are removed.
    """
    # pylint: disable=import-outside-toplevel
    import json  # noqa
    import asyncio  # noqa
    from copy import deepcopy  # noqa
    from dateutil import rrule  # noqa

    start_date = (
        datetime.strptime(start_date, "%Y-%m-%d")
        if isinstance(start_date, str)
        else start_date
    )
    end_date = (
        datetime.strptime(end_date, "%Y-%m-%d")
        if isinstance(end_date, str)
        else end_date
    )
    user_agent = get_random_agent()
    results: List[Dict] = []
    symbol = symbol.upper().replace("-", ".").replace(".TO", "").replace(".TSX", "")
    start_date = (
        (datetime.now() - timedelta(weeks=52)).date()
        if start_date is None
        else start_date
    )
    end_date = datetime.now() if end_date is None else end_date

    # Generate a list of dates from start_date to end_date with a frequency of 4 weeks.
    dates = list(
        rrule.rrule(rrule.WEEKLY, interval=4, dtstart=start_date, until=end_date)
    )

    # Add end_date to the list if it's not there already.
    if dates[-1] != end_date:
        dates.append(end_date)  # type: ignore

    # Create a list of 4-week (start, end) chunks.
    chunks = [
        (dates[i], dates[i + 1] - timedelta(days=1)) for i in range(len(dates) - 1)
    ]

    # Adjust the end date of the last chunk to be the final end date.
    chunks[-1] = (chunks[-1][0], end_date)  # type: ignore

    url = "https://app-money.tmx.com/graphql"
    headers = {
        "authority": "app-money.tmx.com",
        "referer": f"https://money.tmx.com/en/quote/{symbol}",
        "locale": "en",
        "Content-Type": "application/json",
        "User-Agent": user_agent,
        "Accept": "*/*",
    }

    async def create_task(start, end, results):
        """Fetch one 4-week chunk and append its rows to `results`."""
        # Bug fix: dict.copy() is shallow, so every concurrently gathered
        # task previously shared — and raced on — the same nested
        # "variables" dict in the module-level template. deepcopy gives
        # each task an independent payload.
        payload = deepcopy(gql.get_company_price_history_payload)
        variables = payload["variables"]
        variables["adjusted"] = adjustment != "unadjusted"
        variables["adjustmentType"] = "SO" if adjustment == "splits_only" else None
        variables["end"] = end.strftime("%Y-%m-%d")
        variables["start"] = start.strftime("%Y-%m-%d")
        variables["symbol"] = symbol
        variables["unadjusted"] = adjustment == "unadjusted"
        if variables["adjustmentType"] is None:
            variables.pop("adjustmentType")

        async def _request():
            """One GraphQL POST — shared by the first attempt and retries."""
            return await get_data_from_gql(
                method="POST",
                url=url,
                data=json.dumps(payload),
                headers=headers,
                timeout=3,
            )

        try:
            data = await _request()
        except Exception:  # pylint: disable=broad-except
            # Transient failure: retry once, matching the original's
            # single-retry behavior.
            data = await _request()

        # A string response means the request failed upstream; retry once.
        if isinstance(data, str):
            data = await _request()

        if data.get("data") and data["data"].get("getCompanyPriceHistory"):
            results.extend(data["data"].get("getCompanyPriceHistory"))

        return results

    tasks = [create_task(chunk[0], chunk[1], results) for chunk in chunks]

    await asyncio.gather(*tasks)

    # Drop rows for non-trading sessions before sorting chronologically.
    results = [d for d in results if d["openPrice"] is not None]

    return sorted(results, key=lambda x: x["datetime"], reverse=False)
|
| 875 |
+
|
| 876 |
+
|
| 877 |
+
async def get_weekly_or_monthly_price_history(
    symbol: str,
    start_date: Optional[Union[str, dateType]] = None,
    end_date: Optional[Union[str, dateType]] = None,
    interval: Literal["month", "week"] = "month",
):
    """Get historical weekly or monthly price data.

    Parameters
    ----------
    symbol : str
        The ticker; exchange suffixes (.TO, .TSX, -) are stripped.
    start_date : Optional[Union[str, date]]
        Start of the range; defaults to ~100 years ago (full history).
    end_date : Optional[Union[str, date]]
        End of the range; defaults to now.
    interval : Literal["month", "week"]
        Bar frequency. Default is "month".

    Returns
    -------
    List[Dict]
        Records sorted ascending by "dateTime"; empty on failure.
    """
    # pylint: disable=import-outside-toplevel
    import json  # noqa
    from copy import deepcopy  # noqa

    if start_date:
        start_date = (
            datetime.strptime(start_date, "%Y-%m-%d")
            if isinstance(start_date, str)
            else start_date
        )
    if end_date:
        end_date = (
            datetime.strptime(end_date, "%Y-%m-%d")
            if isinstance(end_date, str)
            else end_date
        )
    user_agent = get_random_agent()
    results: List[Dict] = []
    symbol = symbol.upper().replace("-", ".").replace(".TO", "").replace(".TSX", "")
    start_date = (
        (datetime.now() - timedelta(weeks=52 * 100)).date()
        if start_date is None
        else start_date
    )
    end_date = datetime.now() if end_date is None else end_date

    # Bug fix: the original used dict.copy(), which is shallow — popping
    # keys from payload["variables"] permanently removed them from the
    # shared module-level template, corrupting it for later intraday
    # requests. deepcopy isolates this call's payload.
    payload = deepcopy(gql.get_timeseries_payload)
    # The timeseries template carries intraday-only keys; remove them for
    # the weekly/monthly ("freq") form of the query.
    for stale_key in ("interval", "startDateTime", "endDateTime"):
        payload["variables"].pop(stale_key, None)
    payload["variables"]["symbol"] = symbol
    payload["variables"]["freq"] = interval
    payload["variables"]["end"] = (
        end_date.strftime("%Y-%m-%d") if isinstance(end_date, dateType) else end_date
    )
    payload["variables"]["start"] = (
        start_date.strftime("%Y-%m-%d")
        if isinstance(start_date, dateType)
        else start_date
    )
    url = "https://app-money.tmx.com/graphql"
    headers = {
        "authority": "app-money.tmx.com",
        "referer": f"https://money.tmx.com/en/quote/{symbol}",
        "locale": "en",
        "Content-Type": "application/json",
        "User-Agent": user_agent,
        "Accept": "*/*",
    }

    async def _request():
        """One GraphQL POST — shared by the first attempt and the retry."""
        return await get_data_from_gql(
            method="POST",
            url=url,
            data=json.dumps(payload),
            headers=headers,
            timeout=3,
        )

    data = await _request()
    # A string response means the request failed upstream; retry once.
    if isinstance(data, str):
        data = await _request()

    if data.get("data") and data["data"].get("getTimeSeriesData"):
        results = data["data"].get("getTimeSeriesData")
        results = sorted(results, key=lambda x: x["dateTime"], reverse=False)
    return results
|
| 966 |
+
|
| 967 |
+
|
| 968 |
+
async def get_intraday_price_history(
    symbol: str,
    start_date: Optional[Union[str, dateType]] = None,
    end_date: Optional[Union[str, dateType]] = None,
    interval: Optional[int] = 1,
):
    """Get historical intraday price data.

    The date range is split into 4-week chunks which are fetched
    concurrently; each chunk requests bars between 09:30 and 16:00
    US/Eastern.

    Parameters
    ----------
    symbol : str
        The ticker; exchange suffixes (.TO, .TSX, -) are stripped.
    start_date : Optional[Union[str, date]]
        Start of the range; defaults to 4 weeks ago, and is floored to
        2022-04-12, the first date of available intraday data.
    end_date : Optional[Union[str, date]]
        End of the range; defaults to today.
    interval : Optional[int]
        Bar interval in minutes. Default is 1.

    Returns
    -------
    List[Dict]
        Records sorted ascending by "dateTime" when present.
    """
    # pylint: disable=import-outside-toplevel
    import json  # noqa
    import asyncio  # noqa
    import pytz  # noqa
    from copy import deepcopy  # noqa
    from dateutil import rrule  # noqa

    if start_date:
        start_date = (
            datetime.strptime(start_date, "%Y-%m-%d")
            if isinstance(start_date, str)
            else start_date
        )
    if end_date:
        end_date = (
            datetime.strptime(end_date, "%Y-%m-%d")
            if isinstance(end_date, str)
            else end_date
        )
    user_agent = get_random_agent()
    results: List[Dict] = []
    symbol = symbol.upper().replace("-", ".").replace(".TO", "").replace(".TSX", "")
    start_date = (
        (datetime.now() - timedelta(weeks=4)).date()
        if start_date is None
        else start_date
    )
    end_date = datetime.now().date() if end_date is None else end_date
    # This is the first date of available intraday data.
    date_check = datetime(2022, 4, 12).date()
    start_date = max(start_date, date_check)
    if end_date < date_check:  # type: ignore
        end_date = datetime.now().date()

    # Generate a list of dates from start_date to end_date with a frequency of 4 weeks.
    dates = list(
        rrule.rrule(rrule.WEEKLY, interval=4, dtstart=start_date, until=end_date)  # type: ignore
    )

    if dates[-1] != end_date:
        dates.append(end_date)  # type: ignore

    # Create a list of 4-week (start, end) chunks.
    chunks = [
        (dates[i], dates[i + 1] - timedelta(days=1)) for i in range(len(dates) - 1)
    ]

    # Adjust the end date of the last chunk to be the final end date.
    chunks[-1] = (chunks[-1][0], end_date)  # type: ignore

    url = "https://app-money.tmx.com/graphql"
    headers = {
        "authority": "app-money.tmx.com",
        "referer": f"https://money.tmx.com/en/quote/{symbol}",
        "locale": "en",
        "Content-Type": "application/json",
        "User-Agent": user_agent,
        "Accept": "*/*",
    }

    async def create_task(start, end, results):
        """Fetch one 4-week chunk and append its bars to `results`."""
        # Session bounds: 9:30 AM to 4:00 PM on the chunk's dates.
        start_obj = datetime.combine(start, time(9, 30))
        end_obj = datetime.combine(end, time(16, 0))

        # Localize to US/Eastern, then convert to epoch timestamps.
        est = pytz.timezone("US/Eastern")
        start_time = int(est.localize(start_obj).timestamp())
        end_time = int(est.localize(end_obj).timestamp())

        # Bug fix: dict.copy() is shallow, so every concurrently gathered
        # task previously mutated — and popped keys from — the same
        # nested "variables" dict in the shared module-level template.
        # deepcopy gives each task an independent payload.
        payload = deepcopy(gql.get_timeseries_payload)
        # Remove the daily-form keys; the intraday form uses epoch bounds.
        for stale_key in ("start", "end", "freq"):
            payload["variables"].pop(stale_key, None)
        payload["variables"]["startDateTime"] = start_time
        payload["variables"]["endDateTime"] = end_time
        payload["variables"]["interval"] = interval
        payload["variables"]["symbol"] = symbol

        async def _request():
            """One GraphQL POST — shared by the first attempt and the retry."""
            return await get_data_from_gql(
                method="POST",
                url=url,
                data=json.dumps(payload),
                headers=headers,
                timeout=3,
            )

        data = await _request()
        # A string response means the request failed upstream; retry once.
        if isinstance(data, str):
            data = await _request()

        if data.get("data") and data["data"].get("getTimeSeriesData"):
            results.extend(data["data"].get("getTimeSeriesData"))

        return results

    tasks = [create_task(chunk[0], chunk[1], results) for chunk in chunks]

    await asyncio.gather(*tasks)

    if len(results) > 0 and "dateTime" in results[0]:
        results = sorted(results, key=lambda x: x["dateTime"], reverse=False)

    return results
|
| 1100 |
+
|
| 1101 |
+
|
| 1102 |
+
async def get_all_bonds(use_cache: bool = True) -> "DataFrame":
    """Get all bonds reference data published by CIRO.

    The complete list is approximately 70-100K securities.
    """
    # pylint: disable=import-outside-toplevel
    from aiohttp_client_cache import SQLiteBackend  # noqa
    from openbb_core.app.utils import get_user_cache_directory  # noqa
    from pandas import DataFrame  # noqa

    # Cache backend for the bonds list; entries expire daily.
    bonds_cache = SQLiteBackend(
        f"{get_user_cache_directory()}/http/tmx_bonds", expire_after=timedelta(days=1)
    )

    url = "https://bondtradedata.iiroc.ca/debtip/designatedbonds/list"
    response = await get_data_from_url(
        url, use_cache=use_cache, timeout=30, backend=bonds_cache
    )

    # Convert the response to a DataFrame and set column types for proper
    # filtering in-fetcher; multiple functions may share this response.
    bonds = (
        DataFrame.from_records(response)
        .replace("N/A", None)
        .sort_values(by=["lastTradedDate", "totalTrades"], ascending=False)
    )

    bonds["issuer"] = bonds["issuer"].fillna("-").replace("-", None).astype(str)

    # Apply numeric dtypes column-by-column.
    dtype_map = {
        "totalTrades": int,
        "secKey": int,
        "lastPrice": float,
        "lowestPrice": float,
        "highestPrice": float,
        "lastYield": float,
        "couponRate": float,
    }
    for column_name, caster in dtype_map.items():
        bonds[column_name] = bonds[column_name].astype(caster)

    return bonds
|