diff --git a/openbb_platform/core/README.md b/openbb_platform/core/README.md new file mode 100644 index 0000000000000000000000000000000000000000..741cb8c4a22b7342de3e072b1633feffc6af5a75 --- /dev/null +++ b/openbb_platform/core/README.md @@ -0,0 +1,57 @@ +# OpenBB Platform - Core + +## Overview + +The Core extension serves as the foundational component of the OpenBB Platform. It encapsulates essential functionalities and serves as an infrastructural base for other extensions. This extension is vital for maintaining the integrity and standardization of the platform. + +## Key Features + +- **Standardized Data Model** (`Data` Class): A flexible and dynamic Pydantic model capable of handling various data structures. +- **Standardized Query Params** (`QueryParams` Class): A Pydantic model for handling querying to different providers. +- **Dynamic Field Support**: Enables handling of undefined fields, providing versatility in data processing. +- **Robust Data Validation**: Utilizes Pydantic's validation features to ensure data integrity. +- **API Routing Mechanism** (`Router` Class): Simplifies the process of defining API routes and endpoints - out of the box Python and Web endpoints. + +## Getting Started + +### Prerequisites + +- Python 3.9 or higher. +- Familiarity with FastAPI and Pydantic. + +### Installation + +Installing through pip: + +```bash +pip install openbb-core +``` + +> Note that, the openbb-core is an infrastructural component of the OpenBB Platform. It is not intended to be used as a standalone package. + +### Usage + +The Core extension is used as the basis for building and integrating new data sources, providers, and extensions into the OpenBB Platform. It provides the necessary classes and structures for standardizing and handling data. + +### Contributing + +We welcome contributions! If you're looking to contribute, please: + +- Follow the existing coding standards and conventions. +- Write clear, documented code. 
+- Ensure your code does not negatively impact performance. +- Test your contributions thoroughly. + +Please refer to our [Contributing Guidelines](https://docs.openbb.co/platform/developer_guide/contributing). + +### Collaboration + +Engage with the development team and the community. Be open to feedback and collaborative discussions. + +### Support + +For support, questions, or more information, please visit [OpenBB Platform Documentation](https://docs.openbb.co/platform). + +### License + +This project is licensed under the MIT License - see the [LICENSE.md](https://github.com/OpenBB-finance/OpenBB/blob/main/LICENSE) file for details. diff --git a/openbb_platform/core/__init__.py b/openbb_platform/core/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..6235d7617a7aa138c7c727dcd2eb3505a64c1a24 --- /dev/null +++ b/openbb_platform/core/__init__.py @@ -0,0 +1 @@ +"""OpenBB Core Module.""" diff --git a/openbb_platform/core/integration/test_obbject.py b/openbb_platform/core/integration/test_obbject.py new file mode 100644 index 0000000000000000000000000000000000000000..aaa1a940ffcb9e290ddfb7659d3ed6c3e0b1005c --- /dev/null +++ b/openbb_platform/core/integration/test_obbject.py @@ -0,0 +1,96 @@ +"""Test the OBBject.""" + +import contextlib +import sys + +import pytest + +with contextlib.suppress(ImportError): + import polars as pl + +with contextlib.suppress(ImportError): + import pandas as pd + +with contextlib.suppress(ImportError): + import numpy as np + +with contextlib.suppress(ImportError): + from openbb_charting.core.openbb_figure import OpenBBFigure + + +# pylint: disable=inconsistent-return-statements +@pytest.fixture(scope="session") +def obb(pytestconfig): + """Fixture to setup obb.""" + + if pytestconfig.getoption("markexpr") != "not integration": + import openbb # pylint: disable=import-outside-toplevel + + return openbb.obb + + +# pylint: disable=redefined-outer-name + + +@pytest.mark.skipif("pandas" not in sys.modules, 
reason="pandas not installed") +@pytest.mark.integration +def test_to_dataframe(obb): + """Test obbject to dataframe.""" + + stocks_df = obb.equity.price.historical("AAPL", provider="fmp").to_dataframe() + assert isinstance(stocks_df, pd.DataFrame) + + +@pytest.mark.skipif( + "polars" not in sys.modules or "polars-lts-cpu" not in sys.modules, + reason="polars not installed", +) +@pytest.mark.integration +def test_to_polars(obb): + """Test obbject to polars.""" + + crypto_pl = obb.crypto.price.historical("BTC-USD", provider="fmp").to_polars() + assert isinstance(crypto_pl, pl.DataFrame) + + +@pytest.mark.skipif("numpy" not in sys.modules, reason="numpy not installed") +@pytest.mark.integration +def test_to_numpy(obb): + """Test obbject to numpy array.""" + + cpi_np = obb.economy.cpi( + country=["portugal", "spain", "switzerland"], frequency="annual" + ).to_numpy() + assert isinstance(cpi_np, np.ndarray) + + +@pytest.mark.integration +def test_to_dict(obb): + """Test obbject to dict.""" + + fed_dict = obb.fixedincome.rate.ameribor(start_date="2020-01-01").to_dict() + assert isinstance(fed_dict, dict) + + +@pytest.mark.skipif( + "openbb_charting" not in sys.modules, reason="openbb_charting not installed" +) +@pytest.mark.integration +def test_to_chart(obb): + """Test obbject to chart.""" + + res = obb.equity.price.historical("AAPL", provider="fmp") + res.charting.to_chart(render=False) + assert isinstance(res.chart.fig, OpenBBFigure) + + +@pytest.mark.skipif( + "openbb_charting" not in sys.modules, reason="openbb_charting not installed" +) +@pytest.mark.integration +def test_show(obb): + """Test obbject to chart.""" + + stocks_data = obb.equity.price.historical("AAPL", provider="fmp", chart=True) + assert isinstance(stocks_data.chart.fig, OpenBBFigure) + assert stocks_data.chart.fig.show() is None diff --git a/openbb_platform/core/openbb_core/__init__.py b/openbb_platform/core/openbb_core/__init__.py new file mode 100644 index 
0000000000000000000000000000000000000000..13627db21b033ef531de5deeba3d43a60efb0ead --- /dev/null +++ b/openbb_platform/core/openbb_core/__init__.py @@ -0,0 +1 @@ +"""OpenBB Core.""" diff --git a/openbb_platform/core/openbb_core/api/app_loader.py b/openbb_platform/core/openbb_core/api/app_loader.py new file mode 100644 index 0000000000000000000000000000000000000000..a336e8e9dfc721fedeb4ca3e198aff2362abab85 --- /dev/null +++ b/openbb_platform/core/openbb_core/api/app_loader.py @@ -0,0 +1,45 @@ +"""App loader module.""" + +from typing import List, Optional + +from fastapi import APIRouter, FastAPI +from fastapi.exceptions import ResponseValidationError +from openbb_core.api.exception_handlers import ExceptionHandlers +from openbb_core.app.model.abstract.error import OpenBBError +from openbb_core.app.router import RouterLoader +from openbb_core.provider.utils.errors import EmptyDataError, UnauthorizedError +from pydantic import ValidationError + + +class AppLoader: + """App loader.""" + + @staticmethod + def add_routers(app: FastAPI, routers: List[Optional[APIRouter]], prefix: str): + """Add routers.""" + for router in routers: + if router: + app.include_router(router=router, prefix=prefix) + + @staticmethod + def add_openapi_tags(app: FastAPI): + """Add openapi tags.""" + main_router = RouterLoader.from_extensions() + # Add tag data for each router in the main router + app.openapi_tags = [ + { + "name": r, + "description": main_router.get_attr(r, "description"), + } + for r in main_router.routers + ] + + @staticmethod + def add_exception_handlers(app: FastAPI): + """Add exception handlers.""" + app.exception_handlers[Exception] = ExceptionHandlers.exception + app.exception_handlers[ValidationError] = ExceptionHandlers.validation + app.exception_handlers[ResponseValidationError] = ExceptionHandlers.validation + app.exception_handlers[OpenBBError] = ExceptionHandlers.openbb + app.exception_handlers[EmptyDataError] = ExceptionHandlers.empty_data + 
app.exception_handlers[UnauthorizedError] = ExceptionHandlers.unauthorized diff --git a/openbb_platform/core/openbb_core/api/auth/user.py b/openbb_platform/core/openbb_core/api/auth/user.py new file mode 100644 index 0000000000000000000000000000000000000000..ecd16b9b3d05bb5227506625cb8c968ac6dfd7d3 --- /dev/null +++ b/openbb_platform/core/openbb_core/api/auth/user.py @@ -0,0 +1,57 @@ +"""User authentication.""" + +import secrets +from typing import Optional + +from fastapi import Depends, HTTPException, status +from fastapi.security import HTTPBasic, HTTPBasicCredentials +from openbb_core.app.model.user_settings import UserSettings +from openbb_core.app.service.user_service import UserService +from openbb_core.env import Env +from typing_extensions import Annotated + +security = HTTPBasic() if Env().API_AUTH else lambda: None + + +async def authenticate_user( + credentials: Annotated[Optional[HTTPBasicCredentials], Depends(security)], +): + """Authenticate the user.""" + if credentials: + username = Env().API_USERNAME + password = Env().API_PASSWORD + + is_correct_username = False + is_correct_password = False + + if username is not None and password is not None: + current_username_bytes = credentials.username.encode("utf8") + correct_username_bytes = username.encode("utf8") + is_correct_username = secrets.compare_digest( + current_username_bytes, correct_username_bytes + ) + current_password_bytes = credentials.password.encode("utf8") + correct_password_bytes = password.encode("utf8") + is_correct_password = secrets.compare_digest( + current_password_bytes, correct_password_bytes + ) + + if not (is_correct_username and is_correct_password): + raise HTTPException( + status_code=status.HTTP_401_UNAUTHORIZED, + detail="Incorrect email or password", + headers={"WWW-Authenticate": "Basic"}, + ) + + +async def get_user_service() -> UserService: + """Get user service.""" + return UserService() + + +async def get_user_settings( + _: Annotated[None, 
Depends(authenticate_user)], + user_service: Annotated[UserService, Depends(get_user_service)], +) -> UserSettings: + """Get user settings.""" + return user_service.read_from_file() diff --git a/openbb_platform/core/openbb_core/api/dependency/__init__.py b/openbb_platform/core/openbb_core/api/dependency/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..4abe38b227a9aad4ae7320f71a03ad7d98f829a0 --- /dev/null +++ b/openbb_platform/core/openbb_core/api/dependency/__init__.py @@ -0,0 +1 @@ +"""OpenBB Core API Dependency.""" diff --git a/openbb_platform/core/openbb_core/api/dependency/coverage.py b/openbb_platform/core/openbb_core/api/dependency/coverage.py new file mode 100644 index 0000000000000000000000000000000000000000..9470d7c227beaf79a5331278043d3fdfbf4eceaa --- /dev/null +++ b/openbb_platform/core/openbb_core/api/dependency/coverage.py @@ -0,0 +1,21 @@ +"""Coverage dependency.""" + +from fastapi import Depends +from openbb_core.app.provider_interface import ProviderInterface +from openbb_core.app.router import CommandMap +from openbb_core.app.service.auth_service import AuthService +from typing_extensions import Annotated + + +async def get_command_map( + _: Annotated[None, Depends(AuthService().auth_hook)] +) -> CommandMap: + """Get command map.""" + return CommandMap() + + +async def get_provider_interface( + _: Annotated[None, Depends(AuthService().auth_hook)] +) -> ProviderInterface: + """Get provider interface.""" + return ProviderInterface() diff --git a/openbb_platform/core/openbb_core/api/dependency/system.py b/openbb_platform/core/openbb_core/api/dependency/system.py new file mode 100644 index 0000000000000000000000000000000000000000..df3192b21a90a2ae7855343500674da52d23d165 --- /dev/null +++ b/openbb_platform/core/openbb_core/api/dependency/system.py @@ -0,0 +1,20 @@ +"""System dependency.""" + +from fastapi import Depends +from openbb_core.app.model.system_settings import SystemSettings +from 
openbb_core.app.service.auth_service import AuthService +from openbb_core.app.service.system_service import SystemService +from typing_extensions import Annotated + + +async def get_system_service() -> SystemService: + """Get system service.""" + return SystemService() + + +async def get_system_settings( + _: Annotated[None, Depends(AuthService().auth_hook)], + system_service: Annotated[SystemService, Depends(get_system_service)], +) -> SystemSettings: + """Get system settings.""" + return system_service.system_settings diff --git a/openbb_platform/core/openbb_core/api/exception_handlers.py b/openbb_platform/core/openbb_core/api/exception_handlers.py new file mode 100644 index 0000000000000000000000000000000000000000..22ff808adb98798ab48d41e04a4f11b1d23914d9 --- /dev/null +++ b/openbb_platform/core/openbb_core/api/exception_handlers.py @@ -0,0 +1,134 @@ +"""Exception handlers module.""" + +# pylint: disable=unused-argument + +import logging +from collections.abc import Iterable +from typing import Any, Union + +from fastapi import Request +from fastapi.exceptions import ResponseValidationError +from fastapi.responses import JSONResponse, Response +from openbb_core.app.model.abstract.error import OpenBBError +from openbb_core.env import Env +from openbb_core.provider.utils.errors import EmptyDataError, UnauthorizedError +from pydantic import ValidationError + +logger = logging.getLogger("uvicorn.error") + + +class ExceptionHandlers: + """Exception handlers.""" + + @staticmethod + async def _handle(exception: Exception, status_code: int, detail: Any): + """Exception handler.""" + if Env().DEBUG_MODE: + raise exception + logger.error(exception) + return JSONResponse( + status_code=status_code, + content={ + "detail": detail, + }, + ) + + @staticmethod + async def exception(_: Request, error: Exception) -> JSONResponse: + """Exception handler for Base Exception.""" + errors = error.errors if hasattr(error, "errors") else error + + if errors: + if isinstance(errors, 
ValueError): + return await ExceptionHandlers._handle( + exception=errors, + status_code=422, + detail=errors.args, + ) + # Required parameters are missing and is not handled by ValidationError. + if isinstance(errors, Iterable): + for err in errors: + if err.get("type") == "missing": + return await ExceptionHandlers._handle( + exception=error, + status_code=422, + detail={**err}, + ) + return await ExceptionHandlers._handle( + exception=error, + status_code=500, + detail=f"Unexpected Error -> {error.__class__.__name__} -> {error}", + ) + + @staticmethod + async def validation( + request: Request, error: Union[ValidationError, ResponseValidationError] + ): + """Exception handler for ValidationError.""" + # Some validation is performed at Fetcher level. + # So we check if the validation error comes from a QueryParams class. + # And that it is in the request query params. + # If yes, we update the error location with query. + # If not, we handle it as a base Exception error. + query_params = dict(request.query_params) + if isinstance(error, ResponseValidationError): + detail = [ + { + **{k: v for k, v in err.items() if k != "ctx"}, + "loc": ("query",) + err.get("loc", ()), + } + for err in error.errors() + ] + return await ExceptionHandlers._handle( + exception=error, + status_code=422, + detail=detail, + ) + try: + errors = ( + error.errors(include_url=False) + if hasattr(error, "errors") + else error.errors + ) + except Exception: + errors = error.errors if hasattr(error, "errors") else error + all_in_query = all( + loc in query_params for err in errors for loc in err.get("loc", ()) + ) + if "QueryParams" in error.title and all_in_query: + detail = [ + { + **{k: v for k, v in err.items() if k != "ctx"}, + "loc": ("query",) + err.get("loc", ()), + } + for err in errors + ] + return await ExceptionHandlers._handle( + exception=error, + status_code=422, + detail=detail, + ) + return await ExceptionHandlers.exception(request, error) + + @staticmethod + async def 
openbb(_: Request, error: OpenBBError): + """Exception handler for OpenBBError.""" + return await ExceptionHandlers._handle( + exception=error, + status_code=400, + detail=str(error.original), + ) + + @staticmethod + async def empty_data(_: Request, error: EmptyDataError): + """Exception handler for EmptyDataError.""" + return Response(status_code=204) + + @staticmethod + async def unauthorized(_: Request, error: UnauthorizedError): + """Exception handler for OpenBBError.""" + return await ExceptionHandlers._handle( + exception=error, + status_code=502, + detail=str(error.original), + ) diff --git a/openbb_platform/core/openbb_core/api/rest_api.py b/openbb_platform/core/openbb_core/api/rest_api.py new file mode 100644 index 0000000000000000000000000000000000000000..96e7b60b57d93350455489d05bc15418dc2bb20c --- /dev/null +++ b/openbb_platform/core/openbb_core/api/rest_api.py @@ -0,0 +1,105 @@ +"""REST API for the OpenBB Platform.""" + +import logging +from contextlib import asynccontextmanager + +from fastapi import FastAPI +from fastapi.middleware.cors import CORSMiddleware +from openbb_core.api.app_loader import AppLoader +from openbb_core.api.router.commands import router as router_commands +from openbb_core.api.router.coverage import router as router_coverage +from openbb_core.api.router.system import router as router_system +from openbb_core.app.service.auth_service import AuthService +from openbb_core.app.service.system_service import SystemService +from openbb_core.env import Env + +logger = logging.getLogger("uvicorn.error") + +system = SystemService().system_settings + + +@asynccontextmanager +async def lifespan(_: FastAPI): + """Startup event.""" + auth = "ENABLED" if Env().API_AUTH else "DISABLED" + banner = rf""" + + ███╗ + █████████████████╔══█████████████████╗ OpenBB Platform v{system.version} + ███╔══════════███║ ███╔══════════███║ + █████████████████║ █████████████████║ Authentication: {auth} + ╚═════════════███║ ███╔═════════════╝ + ██████████████║ 
██████████████╗ + ███╔═══════███║ ███╔═══════███║ + ██████████████║ ██████████████║ + ╚═════════════╝ ╚═════════════╝ +Investment research for everyone, anywhere. + + https://my.openbb.co/app/platform + +""" + logger.info(banner) + yield + + +app = FastAPI( + title=system.api_settings.title, + description=system.api_settings.description, + version=system.api_settings.version, + terms_of_service=system.api_settings.terms_of_service, + contact={ + "name": system.api_settings.contact_name, + "url": system.api_settings.contact_url, + "email": system.api_settings.contact_email, + }, + license_info={ + "name": system.api_settings.license_name, + "url": system.api_settings.license_url, + }, + servers=[ + { + "url": s.url, + "description": s.description, + } + for s in system.api_settings.servers + ], + lifespan=lifespan, +) +app.add_middleware( + CORSMiddleware, + allow_origins=system.api_settings.cors.allow_origins, + allow_methods=system.api_settings.cors.allow_methods, + allow_headers=system.api_settings.cors.allow_headers, +) +AppLoader.add_routers( + app=app, + routers=( + [AuthService().router, router_system, router_coverage, router_commands] + if Env().DEV_MODE + else ( + [router_commands, router_coverage] + if hasattr(router_commands, "routes") and router_commands.routes + else [router_commands] + ) + ), + prefix=system.api_settings.prefix, +) +AppLoader.add_openapi_tags(app) +AppLoader.add_exception_handlers(app) + + +if __name__ == "__main__": + # pylint: disable=import-outside-toplevel + import uvicorn + + # This initializes the OpenBB environment variables so they can be read before uvicorn is run. 
+ Env() + uvicorn_kwargs = system.python_settings.model_dump().get("uvicorn", {}) + uvicorn_reload = uvicorn_kwargs.pop("reload", None) + + if uvicorn_reload is None or uvicorn_reload: + uvicorn_kwargs["reload"] = True + + uvicorn_app = uvicorn_kwargs.pop("app", "openbb_core.api.rest_api:app") + + uvicorn.run(uvicorn_app, **uvicorn_kwargs) diff --git a/openbb_platform/core/openbb_core/api/router/__init__.py b/openbb_platform/core/openbb_core/api/router/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..93fabc683c4fe582003a82a8ce9312ffc1c63a99 --- /dev/null +++ b/openbb_platform/core/openbb_core/api/router/__init__.py @@ -0,0 +1 @@ +"""OpenBB Core API Router.""" diff --git a/openbb_platform/core/openbb_core/api/router/commands.py b/openbb_platform/core/openbb_core/api/router/commands.py new file mode 100644 index 0000000000000000000000000000000000000000..cdd0f28624cbcb4ff67b1d47eb9a3ac37ebc6880 --- /dev/null +++ b/openbb_platform/core/openbb_core/api/router/commands.py @@ -0,0 +1,266 @@ +"""Commands: generates the command map.""" + +import inspect +from functools import partial, wraps +from inspect import Parameter, Signature, signature +from typing import Any, Callable, Dict, Optional, Tuple, TypeVar + +from fastapi import APIRouter, Depends, Header +from fastapi.routing import APIRoute +from openbb_core.app.command_runner import CommandRunner +from openbb_core.app.model.command_context import CommandContext +from openbb_core.app.model.obbject import OBBject +from openbb_core.app.model.user_settings import UserSettings +from openbb_core.app.router import RouterLoader +from openbb_core.app.service.auth_service import AuthService +from openbb_core.app.service.system_service import SystemService +from openbb_core.app.service.user_service import UserService +from openbb_core.env import Env +from pydantic import BaseModel +from typing_extensions import Annotated, ParamSpec + +try: + from openbb_charting import Charting + + 
CHARTING_INSTALLED = True +except ImportError: + CHARTING_INSTALLED = False + +T = TypeVar("T") +P = ParamSpec("P") + +router = APIRouter(prefix="") + + +def build_new_annotation_map(sig: Signature) -> Dict[str, Any]: + """Build new annotation map.""" + annotation_map = {} + parameter_list = sig.parameters.values() + + for parameter in parameter_list: + annotation_map[parameter.name] = parameter.annotation + + annotation_map["return"] = sig.return_annotation + + return annotation_map + + +def build_new_signature(path: str, func: Callable) -> Signature: + """Build new function signature.""" + sig = signature(func) + parameter_list = sig.parameters.values() + return_annotation = sig.return_annotation + new_parameter_list = [] + var_kw_pos = len(parameter_list) + for pos, parameter in enumerate(parameter_list): + if parameter.name == "cc" and parameter.annotation == CommandContext: + continue + + if parameter.kind == Parameter.VAR_KEYWORD: + # We track VAR_KEYWORD parameter to insert the any additional + # parameters we need to add before it and avoid a SyntaxError + var_kw_pos = pos + + new_parameter_list.append( + Parameter( + parameter.name, + kind=parameter.kind, + default=parameter.default, + annotation=parameter.annotation, + ) + ) + + if CHARTING_INSTALLED and path.replace("/", "_")[1:] in Charting.functions(): + new_parameter_list.insert( + var_kw_pos, + Parameter( + "chart", + kind=Parameter.POSITIONAL_OR_KEYWORD, + default=False, + annotation=bool, + ), + ) + var_kw_pos += 1 + + if custom_headers := SystemService().system_settings.api_settings.custom_headers: + for name, default in custom_headers.items(): + new_parameter_list.insert( + var_kw_pos, + Parameter( + name.replace("-", "_"), + kind=Parameter.POSITIONAL_OR_KEYWORD, + default=default, + annotation=Annotated[ + Optional[str], Header(include_in_schema=False) + ], + ), + ) + var_kw_pos += 1 + + if Env().API_AUTH: + new_parameter_list.insert( + var_kw_pos, + Parameter( + "__authenticated_user_settings", 
+ kind=Parameter.POSITIONAL_OR_KEYWORD, + default=UserSettings(), + annotation=Annotated[ + UserSettings, Depends(AuthService().user_settings_hook) + ], + ), + ) + var_kw_pos += 1 + + return Signature( + parameters=new_parameter_list, + return_annotation=return_annotation, + ) + + +def validate_output(c_out: OBBject) -> OBBject: + """ + Validate OBBject object. + + Checks against the OBBject schema and removes fields that contain the + `exclude_from_api` extra `pydantic.Field` kwarg. + Note that the modification to the `OBBject` object is done in-place. + + Parameters + ---------- + c_out : OBBject + OBBject object to validate. + + Returns + ------- + Dict + Serialized OBBject. + """ + + def is_model(type_): + return inspect.isclass(type_) and issubclass(type_, BaseModel) + + def exclude_fields_from_api(key: str, value: Any): + type_ = type(value) + field = c_out.model_fields.get(key, None) + json_schema_extra = field.json_schema_extra if field else None + + # case where 1st layer field needs to be excluded + if ( + json_schema_extra + and isinstance(json_schema_extra, dict) + and json_schema_extra.get("exclude_from_api", None) + ): + delattr(c_out, key) + + # if it's a model with nested fields + elif is_model(type_): + for field_name, field in type_.model_fields.items(): + extra = getattr(field, "json_schema_extra", None) + if ( + extra + and isinstance(extra, dict) + and extra.get("exclude_from_api", None) + ): + delattr(value, field_name) + + # if it's a yet a nested model we need to go deeper in the recursion + elif is_model(getattr(field, "annotation", None)): + exclude_fields_from_api(field_name, getattr(value, field_name)) + + # Let a non-OBBject object pass through without validation + if not isinstance(c_out, OBBject): + return c_out + + for k, v in c_out.model_copy(): + exclude_fields_from_api(k, v) + + return c_out + + +def build_api_wrapper( + command_runner: CommandRunner, + route: APIRoute, +) -> Callable: + """Build API wrapper for a command.""" + 
func: Callable = route.endpoint # type: ignore + path: str = route.path # type: ignore + + no_validate = ( + openapi_extra.get("no_validate") + if (openapi_extra := getattr(route, "openapi_extra", None)) + else None + ) + new_signature = build_new_signature(path=path, func=func) + new_annotations_map = build_new_annotation_map(sig=new_signature) + func.__signature__ = new_signature # type: ignore + func.__annotations__ = new_annotations_map + + if no_validate is True: + route.response_model = None + + @wraps(wrapped=func) + async def wrapper(*args: Tuple[Any], **kwargs: Dict[str, Any]) -> OBBject: + user_settings: UserSettings = UserSettings.model_validate( + kwargs.pop( + "__authenticated_user_settings", + UserService.read_from_file(), + ) + ) + p = path.strip("/").replace("/", ".") + defaults = ( + getattr(user_settings.defaults, "__dict__", {}) + .get("commands", {}) + .get(p, {}) + ) + + if defaults: + _provider = defaults.pop("provider", None) + standard_params = getattr( + kwargs.pop("standard_params", None), "__dict__", {} + ) + extra_params = getattr(kwargs.pop("extra_params", None), "__dict__", {}) + + if "chart" in defaults: + kwargs["chart"] = defaults.pop("chart", False) + + if "chart_params" in defaults: + extra_params["chart_params"] = defaults.pop("chart_params", {}) + + for k, v in defaults.items(): + if k in standard_params and standard_params[k] is None: + standard_params[k] = v + elif (k in standard_params and standard_params[k] is not None) or ( + k in extra_params and extra_params[k] is not None + ): + continue + elif k not in extra_params or ( + k in extra_params and extra_params[k] is None + ): + extra_params[k] = v + + kwargs["standard_params"] = standard_params + kwargs["extra_params"] = extra_params + + execute = partial(command_runner.run, path, user_settings) + output = await execute(*args, **kwargs) + + if isinstance(output, OBBject) and not no_validate: + return validate_output(output) + + return output + + return wrapper + + +def 
add_command_map(command_runner: CommandRunner, api_router: APIRouter) -> None: + """Add command map to the API router.""" + plugins_router = RouterLoader.from_extensions() + + for route in plugins_router.api_router.routes: + route.endpoint = build_api_wrapper(command_runner=command_runner, route=route) # type: ignore # noqa + api_router.include_router(router=plugins_router.api_router) + + +system_settings = SystemService(logging_sub_app="api").system_settings +command_runner_instance = CommandRunner(system_settings=system_settings) +add_command_map(command_runner=command_runner_instance, api_router=router) diff --git a/openbb_platform/core/openbb_core/api/router/coverage.py b/openbb_platform/core/openbb_core/api/router/coverage.py new file mode 100644 index 0000000000000000000000000000000000000000..6f2d97f8d33d9140124c174301f16e0706007a42 --- /dev/null +++ b/openbb_platform/core/openbb_core/api/router/coverage.py @@ -0,0 +1,105 @@ +"""Coverage API router.""" + +import json + +from fastapi import APIRouter, Depends +from openbb_core.api.dependency.coverage import get_command_map, get_provider_interface +from openbb_core.app.provider_interface import ProviderInterface +from openbb_core.app.router import CommandMap +from typing_extensions import Annotated + +router = APIRouter(prefix="/coverage", tags=["Coverage"]) + + +@router.get("/command_model", openapi_extra={"widget_config": {"exclude": True}}) +async def get_commands_model_map( + command_map: Annotated[CommandMap, Depends(get_command_map)], + provider_interface: Annotated[ProviderInterface, Depends(get_provider_interface)], +): + """Get the command to provider model mapping.""" + + commands_map: dict = {} + + for command in command_map.commands_model: + model = command_map.commands_model[command] + pi_command = provider_interface.map[model] + schema = provider_interface.return_annotations[model] + providers = list(pi_command) + new_command: dict = {} + new_command["response_schema_name"] = schema.__name__ if 
schema else None + for provider in providers: + new_command[provider] = { + "QueryParams": {"docstring": "", "fields": {}}, + "Data": {"docstring": "", "fields": {}}, + } + p = pi_command[provider] + query = p.get("QueryParams", {}) + query_fields = query.get("fields", {}) + data = p.get("Data", {}) + data_fields = data.get("fields", {}) + + for field, field_info in query_fields.items(): + attributes = ( + field_info._attributes_set # pylint: disable=protected-access + ) + if attributes.get("annotation"): + _annotation = str(attributes.get("annotation")) + attributes["annotation"] = _annotation + + new_command[provider]["QueryParams"]["fields"][field] = attributes + + new_command[provider]["QueryParams"]["docstring"] = query.get("docstring") + + for field, field_info in data_fields.items(): + attributes = ( + field_info._attributes_set # pylint: disable=protected-access + ) + if attributes.get("annotation"): + _annotation = str(attributes.get("annotation")) + attributes["annotation"] = _annotation + new_command[provider]["Data"]["fields"][field] = attributes + + new_command[provider]["Data"]["docstring"] = data.get("docstring") + + if openbb_info := new_command.get("openbb", {}): + for key in list(new_command): + if key == "response_schema_name": + continue + + if obb_params := openbb_info.get("QueryParams", {}).get( + "fields", {} + ): + old_fields = new_command[key]["QueryParams"].get("fields", {}) + new_command[key]["QueryParams"]["fields"] = { + **obb_params, + **old_fields, + } + if obb_data := openbb_info.get("Data", {}).get("fields", {}): + old_fields = new_command[key]["Data"].get("fields", {}) + new_command[key]["Data"]["fields"] = {**obb_data, **old_fields} + _ = new_command.pop("openbb") + commands_map[command] = new_command + + def serializer(obj): + """Serialize the object.""" + if isinstance(obj, type): + return str(obj) + return obj + + return json.loads(json.dumps(commands_map, default=serializer, indent=4)) + + +@router.get("/providers", 
"""Coverage API router helper functions."""

from inspect import _empty, signature
from typing import TYPE_CHECKING, Any, Callable, Dict, Optional, Tuple, Type

from openbb_core.app.provider_interface import ProviderInterface
from pydantic import BaseModel, Field, create_model

if TYPE_CHECKING:
    from openbb_core.app.static.app_factory import BaseApp

# Shared singleton describing every provider's query/data models.
provider_interface = ProviderInterface()


def get_route_callable(app: "BaseApp", route: str) -> Callable:
    """Resolve a route string (e.g. ``/equity/price`` or ``.equity.price``) to its bound callable.

    Walks the attribute chain on ``app``; raises ``AttributeError`` when a
    path segment does not exist.
    """
    # TODO: Add return typing Optional[Callable]. First need to figure how to
    # do that starting from "BaseApp" and account for the possibility of a
    # route not existing. Then remove the type: ignore from the function.
    split_route = route.replace(".", "/").split("/")[1:]

    return_callable = app
    for route_path in split_route:
        return_callable = getattr(return_callable, route_path)

    return return_callable  # type: ignore


def signature_to_fields(app: "BaseApp", route: str) -> Dict[str, Tuple[Any, Field]]:  # type: ignore
    """Convert a command signature to pydantic fields.

    Each parameter (except ``*args``/``**kwargs``) becomes a
    ``(annotation, Field)`` pair titled ``"openbb"`` so it always survives
    provider filtering in ``create_combined_model``.
    """
    return_callable = get_route_callable(app, route)
    sig = signature(return_callable)

    fields = {}
    for name, param in sig.parameters.items():
        if name not in ["kwargs", "args"]:
            type_annotation = (
                param.annotation if param.annotation is not _empty else Any
            )
            # Annotated[...] parameters carry their description in __metadata__.
            description = (
                param.annotation.__metadata__[0].description
                if hasattr(param.annotation, "__metadata__")
                else None
            )
            fields[name] = (
                type_annotation,
                Field(..., title="openbb", description=description),
            )

    return fields


def dataclass_to_fields(model_name: str) -> Dict[str, Tuple[Any, Field]]:  # type: ignore
    """Convert the provider "extra" params dataclass to pydantic fields.

    Field titles come from the provider interface and MAY be ``None`` for
    fields without a title — consumers must handle that.
    """
    dataclass = provider_interface.params[model_name]["extra"]
    fields = {}
    for name, field in dataclass.__dataclass_fields__.items():
        type_annotation = field.default.annotation if field.default is not None else Any  # type: ignore
        description = field.default.description if field.default is not None else None  # type: ignore
        title = field.default.title if field.default is not None else None  # type: ignore
        fields[name] = (
            type_annotation,
            Field(..., title=title, description=description),
        )

    return fields


def create_combined_model(
    model_name: str,
    *field_sets: Dict[str, Tuple[Any, Field]],  # type: ignore
    filter_by_provider: Optional[str] = None,
) -> Type[BaseModel]:
    """Create a combined pydantic model from several field sets.

    When ``filter_by_provider`` is given, only fields titled ``"openbb"`` or
    matching that provider are kept.
    """
    combined_fields = {}
    for fields in field_sets:
        for name, (type_annotation, field) in fields.items():
            # BUG FIX: field.title may be None (see dataclass_to_fields); the
            # original `"openbb" in field.title` raised TypeError in that case.
            title = field.title or ""  # type: ignore
            if (
                filter_by_provider is None
                or "openbb" in title
                or filter_by_provider in title
            ):
                combined_fields[name] = (type_annotation, field)

    model = create_model(model_name, **combined_fields)  # type: ignore

    # Clean up the metadata so Annotated leftovers do not leak into schemas.
    for field in model.model_fields.values():
        if hasattr(field, "metadata"):
            field.metadata = None  # type: ignore

    return model


def get_route_schema_map(
    app: "BaseApp",
    command_model_map: Dict[str, str],
    filter_by_provider: Optional[str] = None,
) -> Dict[str, Dict[str, Any]]:
    """Build ``{route: {"input": model, "output": model, "callable": fn}}`` for each command."""
    route_schema_map = {}
    for route, model in command_model_map.items():
        input_model = create_combined_model(
            route,
            signature_to_fields(app, route),
            dataclass_to_fields(model),
            filter_by_provider=filter_by_provider,
        )
        output_model = provider_interface.return_schema[model]
        return_callable = get_route_callable(app, route)

        route_schema_map[route] = {
            "input": input_model,
            "output": output_model,
            "callable": return_callable,
        }

    return route_schema_map
"""OpenBB Platform API Account Router."""

from fastapi import APIRouter, Depends
from openbb_core.api.auth.user import authenticate_user, get_user_settings
from openbb_core.app.model.user_settings import UserSettings
from typing_extensions import Annotated

router = APIRouter(prefix="/user", tags=["User"])
# Module-level aliases for the auth dependencies — presumably so the app
# factory can look them up (or swap them) by name; TODO confirm against the
# app factory's usage.
auth_hook = authenticate_user
user_settings_hook = get_user_settings


@router.get("/me")
async def read_user_settings(
    user_settings: Annotated[UserSettings, Depends(get_user_settings)]
):
    """Read current user settings.

    The ``get_user_settings`` dependency resolves the active user; this
    endpoint simply returns the resolved settings model.
    """
    return user_settings
class ExecutionContext:
    """Bundle of everything one command execution needs.

    Carries the command map, the route being executed, and the system/user
    settings that apply to this run.
    """

    # For checking if the command specifies no validation in the API Route.
    # Built once per process at class-definition time.
    _route_map = PathHandler.build_route_map()

    def __init__(
        self,
        command_map: "CommandMap",
        route: str,
        system_settings: "SystemSettings",
        user_settings: "UserSettings",
    ) -> None:
        """Initialize the execution context.

        Parameters
        ----------
        command_map : CommandMap
            Registry mapping routes to command callables.
        route : str
            Route of the command to execute.
        system_settings : SystemSettings
            Platform-wide settings.
        user_settings : UserSettings
            Settings of the invoking user.
        """
        self.command_map = command_map
        self.route = route
        self.system_settings = system_settings
        self.user_settings = user_settings

    @property
    def api_route(self) -> "APIRoute":
        """FastAPI route object for this context's route.

        Raises KeyError if the route is not in the prebuilt route map.
        """
        return self._route_map[self.route]


class ParametersBuilder:
    """Build parameters for a function."""

    @staticmethod
    def get_polished_parameter_list(func: Callable) -> List[Parameter]:
        """Get the signature parameter values of ``func`` as a list, in declaration order."""
        sig = signature(func)
        parameter_list = list(sig.parameters.values())

        return parameter_list
    @classmethod
    def merge_args_and_kwargs(
        cls,
        func: Callable,
        args: Tuple[Any, ...],
        kwargs: Dict[str, Any],
    ) -> Dict[str, Any]:
        """Merge positional args and kwargs into a single name->value dict.

        Precedence per parameter: positional arg, then kwarg, then the
        signature default; parameters with no source at all map to ``None``.
        Inputs are deep-copied so the caller's objects are never mutated.
        """
        args = deepcopy(args)
        kwargs = deepcopy(kwargs)
        parameter_list = cls.get_polished_parameter_list(func=func)
        parameter_map = {}

        for index, parameter in enumerate(parameter_list):
            if index < len(args):
                parameter_map[parameter.name] = args[index]
            elif parameter.name in kwargs:
                parameter_map[parameter.name] = kwargs[parameter.name]
            elif parameter.default is not parameter.empty:
                parameter_map[parameter.name] = parameter.default
            else:
                parameter_map[parameter.name] = None

        return parameter_map

    @staticmethod
    def update_command_context(
        func: Callable,
        kwargs: Dict[str, Any],
        system_settings: "SystemSettings",
        user_settings: "UserSettings",
    ) -> Dict[str, Any]:
        """Inject a ``CommandContext`` as ``cc`` when the function declares that parameter.

        Detection is done on the compiled code object's positional argument
        names, so only a real parameter named ``cc`` triggers injection.
        """
        # pylint: disable=import-outside-toplevel
        from openbb_core.app.model.command_context import CommandContext

        argcount = func.__code__.co_argcount
        if "cc" in func.__code__.co_varnames[:argcount]:
            kwargs["cc"] = CommandContext(
                user_settings=user_settings,
                system_settings=system_settings,
            )

        return kwargs

    @staticmethod
    def _warn_kwargs(
        extra_params: Dict[str, Any],
        model: Type[BaseModel],
    ) -> None:
        """Warn about kwargs that the validation model will silently ignore.

        Only the ``extra_params`` annotation is inspected because ignored
        fields always end up there; ``chart_params`` entries are exempt since
        they are consumed by the charting extension, not the provider.
        """
        # We only check the extra_params annotation because ignored fields
        # will always be there
        annotation = getattr(
            model.model_fields.get("extra_params", None), "annotation", None
        )
        if is_dataclass(annotation) and any(
            t is ExtraParams for t in getattr(annotation, "__bases__", [])
        ):
            valid = asdict(annotation())  # type: ignore
            for p in extra_params:
                if "chart_params" in p:
                    continue
                if p not in valid:
                    warn(
                        message=f"Parameter '{p}' not found.",
                        category=OpenBBWarning,
                    )

    @staticmethod
    def _as_dict(obj: Any) -> Dict[str, Any]:
        """Safely convert an object to a dict; returns {} for anything not convertible."""
        try:
            if isinstance(obj, dict):
                return obj
            return asdict(obj) if is_dataclass(obj) else dict(obj)  # type: ignore
        except Exception:
            return {}

    @staticmethod
    def validate_kwargs(
        func: Callable,
        kwargs: Dict[str, Any],
    ) -> Dict[str, Any]:
        """Validate kwargs against ``func``'s signature and, where possible, coerce types.

        Builds a throwaway pydantic model from the signature (unannotated
        parameters become ``Any``) and returns the validated field dict.
        """
        sig = signature(func)
        fields = {
            n: (
                Any if p.annotation is Parameter.empty else p.annotation,
                ... if p.default is Parameter.empty else p.default,
            )
            for n, p in sig.parameters.items()
        }
        # We allow extra fields to return with model with 'cc: CommandContext'
        config = ConfigDict(extra="allow", arbitrary_types_allowed=True)
        # pylint: disable=C0103
        ValidationModel = create_model(func.__name__, __config__=config, **fields)  # type: ignore
        # Validate and coerce
        model = ValidationModel(**kwargs)
        ParametersBuilder._warn_kwargs(
            ParametersBuilder._as_dict(kwargs.get("extra_params", {})),
            ValidationModel,
        )
        return dict(model)

    # pylint: disable=R0913
    @classmethod
    def build(
        cls,
        args: Tuple[Any, ...],
        execution_context: ExecutionContext,
        func: Callable,
        kwargs: Dict[str, Any],
    ) -> Dict[str, Any]:
        """Build the final kwargs for a command: merge, inject context, validate.

        Pipeline order matters: the signature is polished first, then args
        and kwargs are merged, then ``cc`` is injected, then everything is
        validated/coerced in one pass.
        """
        func = cls.get_polished_func(func=func)
        system_settings = execution_context.system_settings
        user_settings = execution_context.user_settings

        kwargs = cls.merge_args_and_kwargs(
            func=func,
            args=args,
            kwargs=kwargs,
        )
        kwargs = cls.update_command_context(
            func=func,
            kwargs=kwargs,
            system_settings=system_settings,
            user_settings=user_settings,
        )
        kwargs = cls.validate_kwargs(
            func=func,
            kwargs=kwargs,
        )
        return kwargs
@classmethod + async def _command( + cls, + func: Callable, + kwargs: Dict[str, Any], + show_warnings: bool = True, # pylint: disable=unused-argument # type: ignore + ) -> OBBject: + """Run a command and return the output.""" + obbject = await maybe_coroutine(func, **kwargs) + if isinstance(obbject, OBBject): + obbject.provider = getattr( + kwargs.get("provider_choices"), + "provider", + getattr(obbject, "provider", None), + ) + return obbject + + @classmethod + def _chart( + cls, + obbject: OBBject, + **kwargs, + ) -> None: + """Create a chart from the command output.""" + try: + if "charting" not in obbject.accessors: + raise OpenBBError( + "Charting is not installed. Please install `openbb-charting`." + ) + # Here we will pop the chart_params kwargs and flatten them into the kwargs. + chart_params = {} + extra_params = getattr(obbject, "_extra_params", {}) + + if extra_params and "chart_params" in extra_params: + chart_params = extra_params.get("chart_params", {}) + + if kwargs.get("chart_params"): + chart_params.update(kwargs.pop("chart_params", {})) + # Verify that kwargs is not nested as kwargs so we don't miss any chart params. 
    @classmethod
    def _extract_params(cls, kwargs, key) -> Dict:
        """Extract a params model from ``kwargs[key]`` and convert it to a dict.

        Dataclass/model instances expose their fields via ``__dict__``; plain
        dicts (or a missing key) are returned as-is.
        """
        params = kwargs.get(key, {})
        if hasattr(params, "__dict__"):
            return params.__dict__
        return params

    # pylint: disable=R0913, R0914
    @classmethod
    async def _execute_func(  # pylint: disable=too-many-positional-arguments
        cls,
        route: str,
        args: Tuple[Any, ...],
        execution_context: ExecutionContext,
        func: Callable,
        kwargs: Dict[str, Any],
    ) -> OBBject:
        """Execute a command function: build params, run, chart, collect warnings, log.

        All warnings raised during execution are captured and attached to the
        resulting OBBject; the ``finally`` block always emits a log entry,
        including when the command raised (``exc_info()`` carries the error).
        """
        # pylint: disable=import-outside-toplevel
        from openbb_core.app.logs.logging_service import LoggingService

        user_settings = execution_context.user_settings
        system_settings = execution_context.system_settings
        raised_warnings: list = []
        custom_headers: Optional[dict[str, Any]] = None

        try:
            with catch_warnings(record=True) as warning_list:
                # If we're on Jupyter we need to pop here because we will lose
                # "chart" after ParametersBuilder.build. This needs to be fixed
                # in a way that chart is added to the function signature and
                # shared for jupyter and api. We can check in the router
                # decorator if the given function has a chart in the charting
                # extension then we add it there. This way we can remove the
                # chart parameter from the commands.py and package_builder, it
                # will be added to the function signature in the router
                # decorator.
                chart = kwargs.pop("chart", False)

                kwargs = ParametersBuilder.build(
                    args=args,
                    execution_context=execution_context,
                    func=func,
                    kwargs=kwargs,
                )
                kwargs = kwargs if kwargs is not None else {}
                # If we're on the api we need to remove "chart" here because
                # the parameter is added on commands.py and the function
                # signature does not expect "chart".
                kwargs.pop("chart", None)
                # We also pop custom headers; each configured header is pulled
                # out of kwargs (by its snake_case name) or falls back to the
                # configured default.
                model_headers = system_settings.api_settings.custom_headers or {}
                custom_headers = {
                    name: kwargs.pop(name.replace("-", "_"), default)
                    for name, default in model_headers.items() or {}
                } or None

                obbject = await cls._command(func, kwargs)
                # The output might be from a router command with
                # 'no_validate=True'. It might be of a different type than
                # OBBject. In this case, we avoid accessing those attributes.
                if isinstance(obbject, OBBject):
                    # This section prepares the obbject to pass to the
                    # charting service.
                    obbject._route = route  # pylint: disable=protected-access
                    std_params = cls._extract_params(kwargs, "standard_params") or (
                        kwargs if "data" in kwargs else {}
                    )
                    extra_params = cls._extract_params(kwargs, "extra_params")
                    obbject._standard_params = (  # pylint: disable=protected-access
                        std_params
                    )
                    obbject._extra_params = (  # pylint: disable=protected-access
                        extra_params
                    )
                    if chart and obbject.results:
                        cls._chart(obbject, **kwargs)

                raised_warnings = warning_list if warning_list else []
        finally:
            # NOTE: if the command raised before `obbject` was bound,
            # `raised_warnings` is still [] so the block below is skipped and
            # no unbound-name error can occur here.
            if raised_warnings:
                if isinstance(obbject, OBBject):
                    obbject.warnings = []
                for w in raised_warnings:
                    if isinstance(obbject, OBBject):
                        obbject.warnings.append(cast_warning(w))
                    if user_settings.preferences.show_warnings:
                        showwarning(
                            message=w.message,
                            category=w.category,
                            filename=w.filename,
                            lineno=w.lineno,
                            file=w.file,
                            line=w.line,
                        )
            # Logging happens on both success and failure paths.
            ls = LoggingService(system_settings, user_settings)
            ls.log(
                user_settings=user_settings,
                system_settings=system_settings,
                route=route,
                func=func,
                kwargs=kwargs,
                exec_info=exc_info(),
                custom_headers=custom_headers,
            )

        return obbject

    # pylint: disable=W0718
    @classmethod
    async def run(
        cls,
        execution_context: ExecutionContext,
        /,
        *args,
        **kwargs,
    ) -> OBBject:
        """Run the command named by the execution context's route and return its OBBject.

        Raises AttributeError when the route is not registered. When the
        user preferences enable metadata, timing and argument metadata is
        attached under ``obbject.extra["metadata"]``.
        """
        timestamp = datetime.now()
        start_ns = perf_counter_ns()

        command_map = execution_context.command_map
        route = execution_context.route

        if func := command_map.get_command(route=route):
            obbject = await cls._execute_func(
                route=route,
                args=args,  # type: ignore
                execution_context=execution_context,
                func=func,
                kwargs=kwargs,
            )
        else:
            raise AttributeError(f"Invalid command : route={route}")

        duration = perf_counter_ns() - start_ns

        if execution_context.user_settings.preferences.metadata and isinstance(
            obbject, OBBject
        ):
            try:
                obbject.extra["metadata"] = Metadata(
                    arguments=kwargs,
                    duration=duration,
                    route=route,
                    timestamp=timestamp,
                )
            except Exception as e:
                # Metadata failures must not break the command result; in
                # debug mode they are escalated instead.
                if Env().DEBUG_MODE:
                    raise OpenBBError(e) from e
                warn(str(e), OpenBBWarning)

        return obbject
"""Constants for the OpenBB Platform."""

from pathlib import Path

# Base locations for OpenBB Platform configuration files, all rooted in the
# invoking user's home directory.
HOME_DIRECTORY = Path.home()
OPENBB_DIRECTORY = HOME_DIRECTORY / ".openbb_platform"
USER_SETTINGS_PATH = OPENBB_DIRECTORY / "user_settings.json"
SYSTEM_SETTINGS_PATH = OPENBB_DIRECTORY / "system_settings.json"
+""" + +from typing import Optional, Tuple + +from openbb_core.app.version import VERSION, get_major_minor + + +class DeprecationSummary(str): + """A string subclass that can be used to store deprecation metadata.""" + + def __new__(cls, value: str, metadata: DeprecationWarning): + """Create a new instance of the class.""" + obj = str.__new__(cls, value) + setattr(obj, "metadata", metadata) + return obj + + +class OpenBBDeprecationWarning(DeprecationWarning): + """ + A OpenBB specific deprecation warning. + + This warning is raised when using deprecated functionality in OpenBB. It provides information on when the + deprecation was introduced and the expected version in which the corresponding functionality will be removed. + + Attributes + ---------- + message: Description of the warning. + since: Version in what the deprecation was introduced. + expected_removal: Version in what the corresponding functionality expected to be removed. + """ + + # The choice to use class variables is based on the potential for extending the class in future developments. + # Example: launching Platform V5 and decide to create a subclimagine we areass named OpenBBDeprecatedSinceV4, + # which inherits from OpenBBDeprecationWarning. In this subclass, we would set since=4.X and expected_removal=5.0. + # It's important for these values to be defined at the class level, rather than just at the instance level, + # to ensure consistency and clarity in our deprecation warnings across the platform. 
+ + message: str + since: Tuple[int, int] + expected_removal: Tuple[int, int] + + def __init__( + self, + message: str, + *args: object, + since: Optional[Tuple[int, int]] = None, + expected_removal: Optional[Tuple[int, int]] = None, + ) -> None: + """Initialize the warning.""" + super().__init__(message, *args) + self.message = message.rstrip(".") + self.since = since or get_major_minor(VERSION) + self.expected_removal = expected_removal or (self.since[0] + 1, 0) + self.long_message = ( + f"{self.message}. Deprecated in OpenBB Platform V{self.since[0]}.{self.since[1]}" + f" to be removed in V{self.expected_removal[0]}.{self.expected_removal[1]}." + ) + + def __str__(self) -> str: + """Return the warning message.""" + return self.long_message diff --git a/openbb_platform/core/openbb_core/app/extension_loader.py b/openbb_platform/core/openbb_core/app/extension_loader.py new file mode 100644 index 0000000000000000000000000000000000000000..9dd9bb1dc6bb08ff94928fbf6c7dac3fabcc0af2 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/extension_loader.py @@ -0,0 +1,177 @@ +"""Extension Loader.""" + +from enum import Enum +from functools import lru_cache +from typing import TYPE_CHECKING, Any, Dict, List, Optional + +from importlib_metadata import EntryPoint, EntryPoints, entry_points +from openbb_core.app.model.abstract.singleton import SingletonMeta +from openbb_core.app.model.extension import Extension + +if TYPE_CHECKING: + from openbb_core.app.router import Router + from openbb_core.provider.abstract.provider import Provider + + +class OpenBBGroups(Enum): + """OpenBB Extension Groups.""" + + core = "openbb_core_extension" + provider = "openbb_provider_extension" + obbject = "openbb_obbject_extension" + + @staticmethod + def groups() -> List[str]: + """Return the OpenBBGroups.""" + return [ + OpenBBGroups.core.value, + OpenBBGroups.provider.value, + OpenBBGroups.obbject.value, + ] + + +class ExtensionLoader(metaclass=SingletonMeta): + """Extension loader class.""" 
    def __init__(
        self,
    ) -> None:
        """Initialize the extension loader.

        Entry points for each group are discovered eagerly (sorted by name);
        the corresponding objects are loaded lazily via the cached properties
        below.
        """
        self._obbject_entry_points: EntryPoints = self._sorted_entry_points(
            group=OpenBBGroups.obbject.value
        )
        self._core_entry_points: EntryPoints = self._sorted_entry_points(
            group=OpenBBGroups.core.value
        )
        self._provider_entry_points: EntryPoints = self._sorted_entry_points(
            group=OpenBBGroups.provider.value
        )
        # Lazily-populated caches of loaded objects, keyed by entry point name.
        self._obbject_objects: Dict[str, Extension] = {}
        self._core_objects: Dict[str, Router] = {}
        self._provider_objects: Dict[str, Provider] = {}

    @property
    def obbject_entry_points(self) -> EntryPoints:
        """Return the obbject entry points (sorted by name)."""
        return self._obbject_entry_points

    @property
    def core_entry_points(self) -> EntryPoints:
        """Return the core entry points (sorted by name)."""
        return self._core_entry_points

    @property
    def provider_entry_points(self) -> EntryPoints:
        """Return the provider entry points (sorted by name)."""
        return self._provider_entry_points

    @property
    def entry_points(self) -> List[EntryPoints]:
        """Return all entry point collections: core, provider, obbject."""
        return [
            self._core_entry_points,
            self._provider_entry_points,
            self._obbject_entry_points,
        ]

    @staticmethod
    def _get_entry_point(
        entry_points_: EntryPoints, ext_name: str
    ) -> Optional[EntryPoint]:
        """Given an extension name and a list of entry points, return the matching entry point or None."""
        return next((ep for ep in entry_points_ if ep.name == ext_name), None)

    def get_obbject_entry_point(self, ext_name: str) -> Optional[EntryPoint]:
        """Given an extension name, return the corresponding obbject entry point, if any."""
        return self._get_entry_point(self._obbject_entry_points, ext_name)

    def get_core_entry_point(self, ext_name: str) -> Optional[EntryPoint]:
        """Given an extension name, return the corresponding core entry point, if any."""
        return self._get_entry_point(self._core_entry_points, ext_name)

    def get_provider_entry_point(self, ext_name: str) -> Optional[EntryPoint]:
        """Given an extension name, return the corresponding provider entry point, if any."""
        return self._get_entry_point(self._provider_entry_points, ext_name)

    # NOTE(review): @property + @lru_cache keys the cache on `self` and keeps
    # the instance alive; acceptable here only because the class is a
    # singleton (SingletonMeta) — confirm before reusing this pattern.
    @property
    @lru_cache
    def obbject_objects(self) -> Dict[str, Extension]:
        """Return a dict of loaded obbject extension objects (cached after first access)."""
        self._obbject_objects = self._load_entry_points(
            self._obbject_entry_points, OpenBBGroups.obbject
        )
        return self._obbject_objects

    @property
    @lru_cache
    def core_objects(self) -> Dict[str, "Router"]:
        """Return a dict of loaded core extension objects (cached after first access)."""
        self._core_objects = self._load_entry_points(
            self._core_entry_points, OpenBBGroups.core
        )
        return self._core_objects

    @property
    @lru_cache
    def provider_objects(self) -> Dict[str, "Provider"]:
        """Return a dict of loaded provider extension objects (cached after first access)."""
        self._provider_objects = self._load_entry_points(
            self._provider_entry_points, OpenBBGroups.provider
        )
        return self._provider_objects

    @staticmethod
    def _sorted_entry_points(group: str) -> EntryPoints:
        """Return the group's entry points sorted by name."""
        return sorted(entry_points(group=group))  # type: ignore
class FormatterWithExceptions(logging.Formatter):
    """Logging Formatter that includes formatting of Exceptions."""

    DATEFORMAT = "%Y-%m-%dT%H:%M:%S%z"
    LOGFORMAT = "%(asctime)s|%(name)s|%(funcName)s|%(lineno)s|%(message)s"
    LOGPREFIXFORMAT = (
        "%(levelname)s|%(appName)s|%(commitHash)s|%(appId)s|%(sessionId)s|%(userId)s|"
    )

    @staticmethod
    def calculate_level_name(record: logging.LogRecord) -> str:
        """Map a log record to a one-letter level code.

        'X' when the record carries exception text, otherwise the first
        letter of the level name, or 'U' (unknown) when no level name is set.
        """
        if record.exc_text:
            level_name = "X"
        elif record.levelname:
            level_name = record.levelname[0]
        else:
            level_name = "U"

        return level_name

    @staticmethod
    def extract_log_extra(record: logging.LogRecord):
        """Collect extra fields injected by callers into the record.

        Side effect: when ``func_name_override`` is present, the record's
        ``funcName`` is replaced and ``lineno`` zeroed in place.
        """
        log_extra: dict = dict()

        if hasattr(record, "func_name_override"):
            record.funcName = record.func_name_override  # type: ignore
            record.lineno = 0

        if hasattr(record, "session_id"):
            log_extra["sessionId"] = record.session_id  # type: ignore

        return log_extra

    @staticmethod
    def mock_ipv4(text: str) -> str:
        """Replace IPv4-looking sequences in the text with a placeholder."""
        # pylint: disable=import-outside-toplevel
        import re

        pattern = r"\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}"
        replacement = " FILTERED_IP "
        text_mocked = re.sub(pattern, replacement, text)

        return text_mocked

    @staticmethod
    def mock_email(text: str) -> str:
        """Replace email addresses in the text with a placeholder."""
        # pylint: disable=import-outside-toplevel
        import re

        # BUG FIX: the original TLD class was [A-Z|a-z], which also matched a
        # literal '|' character inside the top-level domain.
        pattern = r"\b[A-Za-z0-9._%+-]+@[A-Za-z0-9.-]+\.[A-Za-z]{2,}\b"
        replacement = " FILTERED_EMAIL "
        text_mocked = re.sub(pattern, replacement, text)

        return text_mocked

    @staticmethod
    def mock_password(text: str) -> str:
        """Replace JSON-style "password" values in the text with a placeholder."""
        # pylint: disable=import-outside-toplevel
        import re

        pattern = r'("password": ")[^"]+'
        replacement = r"\1 FILTERED_PASSWORD "
        text_mocked = re.sub(pattern, replacement, text)
        return text_mocked

    @staticmethod
    def mock_flair(text: str) -> str:
        """Replace "FLAIR" list values in the text with a placeholder."""
        # pylint: disable=import-outside-toplevel
        import re

        pattern = r'("FLAIR": "\[)(.*?)\]'
        replacement = r"\1 FILTERED_FLAIR ]"
        text_mocked = re.sub(pattern, replacement, text)

        return text_mocked

    @staticmethod
    def mock_home_directory(text: str) -> str:
        """Replace the user's home directory path with a placeholder.

        Also normalizes backslashes to forward slashes so Windows paths match
        the POSIX form of the home directory.
        """
        # pylint: disable=import-outside-toplevel
        from pathlib import Path

        user_home_directory = str(Path.home().as_posix())
        text_mocked = text.replace("\\", "/").replace(
            user_home_directory, "MOCKING_USER_PATH"
        )

        return text_mocked

    @staticmethod
    def filter_special_tags(text: str) -> str:
        """Neutralize characters that would break the one-line log format."""
        text_filtered = text.replace("\n", " MOCKING_BREAKLINE ")
        text_filtered = text_filtered.replace("'Traceback", "Traceback")

        return text_filtered
filter_piis(cls, text: str) -> str: + """Filter Personally Identifiable Information in the text.""" + text_filtered = cls.mock_ipv4(text=text) + text_filtered = cls.mock_email(text=text_filtered) + text_filtered = cls.mock_password(text=text_filtered) + text_filtered = cls.mock_home_directory(text=text_filtered) + text_filtered = cls.mock_flair(text=text_filtered) + + return text_filtered + + @classmethod + def filter_log_line(cls, text: str): + """Filter log line.""" + text_filtered = cls.filter_special_tags(text=text) + text_filtered = cls.filter_piis(text=text_filtered) + + return text_filtered + + # OVERRIDE + def __init__( + self, + settings: LoggingSettings, + style="%", + validate=True, + ) -> None: + """Initialize the FormatterWithExceptions.""" + super().__init__( + fmt=self.LOGFORMAT, + datefmt=self.DATEFORMAT, + style=style, + validate=validate, + ) + self.settings = settings + + @property + def settings(self) -> LoggingSettings: + """Get the settings.""" + # pylint: disable=import-outside-toplevel + from copy import deepcopy + + return deepcopy(self.__settings) + + @settings.setter + def settings(self, settings: LoggingSettings) -> None: + """Set the settings.""" + self.__settings = settings + + # OVERRIDE + def formatException(self, ei) -> str: + """Define the Exception formatting handler. + + Parameters + ---------- + ei : logging._SysExcInfoType + Exception to be logged + Returns + ---------- + str + Formatted exception + """ + result = super().formatException(ei) + return repr(result) + + # OVERRIDE + def format(self, record: logging.LogRecord) -> str: + """Define the Log formatter. 
+ + Parameters + ---------- + record : logging.LogRecord + Logging record + Returns + ---------- + str + Formatted_log message + """ + level_name = self.calculate_level_name(record=record) + log_prefix_content = { + "appName": self.settings.app_name, + "levelname": level_name, + "appId": self.settings.app_id, + "sessionId": self.settings.session_id, + "commitHash": "unknown-commit", + "userId": self.settings.user_id, + } + + log_extra = self.extract_log_extra(record=record) + log_prefix_content = {**log_prefix_content, **log_extra} + log_prefix = self.LOGPREFIXFORMAT % log_prefix_content + + record.msg = record.msg.replace("|", "-MOCK_PIPE-") + + log_line = super().format(record) + log_line = self.filter_log_line(text=log_line) + log_line_full = log_prefix + log_line + + return log_line_full diff --git a/openbb_platform/core/openbb_core/app/logs/handlers/path_tracking_file_handler.py b/openbb_platform/core/openbb_core/app/logs/handlers/path_tracking_file_handler.py new file mode 100644 index 0000000000000000000000000000000000000000..6b7ab2a55ba5216f8160b939e11d8de447b0e91c --- /dev/null +++ b/openbb_platform/core/openbb_core/app/logs/handlers/path_tracking_file_handler.py @@ -0,0 +1,86 @@ +"""Path Tracking File Handler.""" + +# IMPORTATION STANDARD +from copy import deepcopy +from logging.handlers import TimedRotatingFileHandler +from pathlib import Path + +# IMPORTATION THIRD PARTY +# IMPORTATION INTERNAL +from openbb_core.app.logs.models.logging_settings import LoggingSettings +from openbb_core.app.logs.utils.expired_files import ( + get_expired_file_list, + get_timestamp_from_x_days, + remove_file_list, +) + +ARCHIVES_FOLDER_NAME = "archives" +TMP_FOLDER_NAME = "tmp" + + +class PathTrackingFileHandler(TimedRotatingFileHandler): + """Path Tracking File Handler.""" + + @staticmethod + def build_log_file_path(settings: LoggingSettings) -> Path: + """Build the log file path.""" + app_name = settings.app_name + directory = settings.user_logs_directory + session_id = 
settings.session_id + + path = directory.absolute().joinpath(f"{app_name}_{session_id}") + return path + + def clean_expired_files(self, before_timestamp: float): + """Remove expired files from logs directory.""" + logs_dir = self.settings.user_logs_directory + archives_directory = logs_dir / ARCHIVES_FOLDER_NAME + tmp_directory = logs_dir / TMP_FOLDER_NAME + + expired_logs_file_list = get_expired_file_list( + directory=logs_dir, + before_timestamp=before_timestamp, + ) + expired_archives_file_list = get_expired_file_list( + directory=archives_directory, + before_timestamp=before_timestamp, + ) + expired_tmp_file_list = get_expired_file_list( + directory=tmp_directory, + before_timestamp=before_timestamp, + ) + remove_file_list(file_list=expired_logs_file_list) + remove_file_list(file_list=expired_archives_file_list) + remove_file_list(file_list=expired_tmp_file_list) + + @property + def settings(self) -> LoggingSettings: + """Get the settings.""" + return deepcopy(self.__settings) + + @settings.setter + def settings(self, settings: LoggingSettings) -> None: + """Set the settings.""" + self.__settings = settings + + # OVERRIDE + def __init__( + self, + settings: LoggingSettings, + *args, + **kwargs, + ) -> None: + """Initialize the PathTrackingFileHandler.""" + # SETUP PARENT CLASS + filename = str(self.build_log_file_path(settings=settings)) + frequency = settings.frequency + kwargs["when"] = frequency + + super().__init__(filename, *args, **kwargs) + + self.suffix += ".log" + + # SETUP CURRENT CLASS + self.__settings = settings + + self.clean_expired_files(before_timestamp=get_timestamp_from_x_days(x=5)) diff --git a/openbb_platform/core/openbb_core/app/logs/handlers/posthog_handler.py b/openbb_platform/core/openbb_core/app/logs/handlers/posthog_handler.py new file mode 100644 index 0000000000000000000000000000000000000000..edea4151df05f6e834f236f7a12d9d888c35a5ca --- /dev/null +++ b/openbb_platform/core/openbb_core/app/logs/handlers/posthog_handler.py @@ -0,0 
class PosthogHandler(logging.Handler):
    """Logging handler that forwards selected log records as Posthog events."""

    class AllowedEvents(Enum):
        """Whitelist of event names that may be sent to Posthog."""

        log_startup = "log_startup"
        log_cmd = "log_cmd"
        log_error = "log_error"
        log_warning = "log_warning"

    def __init__(self, settings: LoggingSettings):
        """Initialize Posthog Handler.

        Parameters
        ----------
        settings : LoggingSettings
            Current logging settings (app/session/user identifiers).
        """
        # pylint: disable=import-outside-toplevel
        from openbb_core.provider.utils.helpers import get_requests_session

        super().__init__()
        self._settings = settings
        self.logged_in = False
        # Public project write key, not a secret credential.
        posthog.api_key = "phc_6FXLqu4uW9yxfyN8DpPdgzCdlYXOmIWdMGh6GnBgJLX"  # pragma: allowlist secret # noqa
        posthog.host = "https://app.posthog.com"  # noqa
        # Route posthog traffic through the shared requests session so proxy/SSL
        # configuration applies.  NOTE(review): relies on posthog internals.
        posthog.request._session = (  # pylint: disable=protected-access
            get_requests_session()
        )

    @property
    def settings(self) -> LoggingSettings:
        """Return a defensive deep copy of the logging settings."""
        # pylint: disable=import-outside-toplevel
        from copy import deepcopy

        return deepcopy(self._settings)

    @settings.setter
    def settings(self, settings: LoggingSettings) -> None:
        """Set logging settings."""
        self._settings = settings

    def emit(self, record: logging.LogRecord):
        """Emit log record.

        Telemetry failures never crash the caller unless DEBUG_MODE is set.
        """
        try:
            self.send(record=record)
        except Exception as e:
            self.handleError(record)
            if Env().DEBUG_MODE:
                raise e

    def distinct_id(self) -> str:
        """Return the user id when known, otherwise the anonymous app id."""
        return self._settings.user_id or self._settings.app_id

    def identify(self) -> None:
        """Identify the user to Posthog (once per handler lifetime)."""
        if self.logged_in or not self._settings.user_id:
            return

        posthog.identify(
            self._settings.user_id,
            {
                "email": self._settings.user_email,
                "primaryUsage": self._settings.user_primary_usage,
            },
        )

        # NOTE(review): for the "pro" sub-app we return before setting
        # logged_in or aliasing, so identify() re-runs on every send —
        # confirm this is intentional.
        if self._settings.sub_app_name == "pro":
            return

        self.logged_in = True
        posthog.alias(self._settings.user_id, self._settings.app_id)

    def log_to_dict(self, log_info: str) -> dict:
        """Parse ``STARTUP:/CMD:/ERROR: {json}`` markers out of a log line."""
        # pylint: disable=import-outside-toplevel
        import re

        log_regex = r"(STARTUP|CMD|ERROR): (.*)"
        log_dict: Dict[str, Any] = {}

        for log in re.findall(log_regex, log_info):
            log_dict[log[0]] = json.loads(log[1])

        return log_dict

    def send(self, record: logging.LogRecord):
        """Send log record to Posthog."""
        # pylint: disable=import-outside-toplevel
        import re

        level_name = logging.getLevelName(record.levelno)
        # Apply the same PII filtering used for file logs.
        log_line = FormatterWithExceptions.filter_log_line(text=record.getMessage())

        log_extra = self.extract_log_extra(record=record)
        log_extra.update(dict(level=level_name, message=log_line))
        event_name = f"log_{level_name.lower()}"

        # A structured STARTUP/CMD/ERROR payload overrides the generic event
        # name and replaces the raw message with the parsed fields.
        if log_dict := self.log_to_dict(log_info=log_line):
            event_name = f"log_{list(log_dict.keys())[0].lower()}"
            log_dict = log_dict.get("STARTUP", log_dict)

            log_extra = {**log_extra, **log_dict}
            log_extra.pop("message", None)

        # Skip terminal-noise lines that carry no analytics value.
        if re.match(r"^(QUEUE|START|END|INPUT:)", log_line) and not log_dict:
            return

        # Only whitelisted event names are ever sent.
        if event_name not in [e.value for e in self.AllowedEvents]:
            return

        self.identify()
        posthog.capture(self.distinct_id(), event_name, properties=log_extra)

    def extract_log_extra(self, record: logging.LogRecord) -> Dict[str, Any]:
        """Build the base analytics properties from settings and the record."""
        log_extra: Dict[str, Any] = {
            "appName": self._settings.app_name,
            "subAppName": self._settings.sub_app_name,
            "appId": self._settings.app_id,
            "sessionId": self._settings.session_id,
            "platform": self._settings.platform,
            "pythonVersion": self._settings.python_version,
            "obbPlatformVersion": self._settings.platform_version,
        }

        if self._settings.user_id:
            log_extra["userId"] = self._settings.user_id

        # Record-level "extra" attributes override the base properties.
        if hasattr(record, "extra"):
            log_extra = {**log_extra, **record.extra}  # type: ignore

        if record.exc_info:
            log_extra["exception"] = {
                "type": str(record.exc_info[0]),
                "value": str(record.exc_info[1]),
            }

        return log_extra
class HandlersManager:
    """Builds and refreshes the set of handlers attached to a logger."""

    def __init__(self, logger: logging.Logger, settings: LoggingSettings):
        """Keep references to the target logger and the logging settings."""
        self._logger = logger
        self._handlers = settings.handler_list
        self._settings = settings

    def _attach(self, handler: logging.Handler) -> None:
        """Give *handler* the standard formatter and register it on the logger."""
        handler.setFormatter(FormatterWithExceptions(settings=self._settings))
        self._logger.addHandler(handler)

    def setup(self):
        """Set the logger handlers and settings."""
        # Disable propagation to root logger to avoid duplicate logs
        self._logger.propagate = False
        self._logger.setLevel(self._settings.verbosity)

        always_on = {
            "stdout": self._add_stdout_handler,
            "stderr": self._add_stderr_handler,
            "noop": self._add_noop_handler,
        }
        # File and telemetry handlers are skipped when logging is suppressed.
        suppressible = {
            "file": self._add_file_handler,
            "posthog": self._add_posthog_handler,
        }

        for handler_type in self._handlers:
            if handler_type in always_on:
                always_on[handler_type]()
            elif handler_type in suppressible and not self._settings.logging_suppress:
                suppressible[handler_type]()
            else:
                self._logger.debug("Unknown log handler.")

    def _add_posthog_handler(self):
        """Add a Posthog handler."""
        self._attach(PosthogHandler(settings=self._settings))

    def _add_stdout_handler(self):
        """Add a stdout handler."""
        self._attach(logging.StreamHandler(sys.stdout))

    def _add_stderr_handler(self):
        """Add a stderr handler."""
        self._attach(logging.StreamHandler(sys.stderr))

    def _add_noop_handler(self):
        """Add a null handler."""
        self._attach(logging.NullHandler())

    def _add_file_handler(self):
        """Add a file handler."""
        self._attach(PathTrackingFileHandler(settings=self._settings))

    def update_handlers(self, settings: LoggingSettings):
        """Push *settings* into existing file/posthog handlers and their formatters."""
        # The suppression flag is loop-invariant, so check it once up front.
        if settings.logging_suppress:
            return
        for handler in self._logger.handlers:
            if isinstance(handler, (PathTrackingFileHandler, PosthogHandler)):
                handler.settings = settings
                handler.formatter.settings = settings  # type: ignore
class DummyProvider(BaseModel):
    """Fallback model used when no ``provider_choices`` was passed in kwargs."""

    provider: str = "not_passed_to_kwargs"


class LoggingService(metaclass=SingletonMeta):
    """Logging Service class responsible for managing logging settings and handling logs.

    Singleton: a single instance is shared process-wide via SingletonMeta.

    Attributes
    ----------
    _user_settings : Optional[UserSettings]
        User Settings object.
    _system_settings : Optional[SystemSettings]
        System Settings object.
    _logging_settings : LoggingSettings
        LoggingSettings object containing the current logging settings.
    _handlers_manager : HandlersManager
        HandlersManager object managing logging handlers.

    Methods
    -------
    __init__(system_settings, user_settings)
        Logging Manager Constructor.

    log(user_settings, system_settings, obbject, route, func, kwargs)
        Log command output and relevant information.

    logging_settings
        Property to access the current logging settings.

    logging_settings.setter
        Setter method to update the logging settings.

    _setup_handlers()
        Setup Logging Handlers.

    _log_startup()
        Log startup information.
    """

    # Dedicated, non-root logger for all platform command logs.
    _logger = logging.getLogger("openbb.logging_service")

    def __init__(
        self,
        system_settings: SystemSettings,
        user_settings: UserSettings,
    ) -> None:
        """Define the Logging Service Constructor.

        Sets up the logging settings and handlers and then logs the startup information.

        Parameters
        ----------
        system_settings : SystemSettings
            System Settings.
        user_settings : UserSettings
            User Settings.
        """
        self._user_settings = user_settings
        self._system_settings = system_settings
        self._logging_settings = LoggingSettings(
            user_settings=self._user_settings,
            system_settings=self._system_settings,
        )
        self._handlers_manager = self._setup_handlers()
        self._log_startup()

    @property
    def logging_settings(self) -> LoggingSettings:
        """Define the Current logging settings.

        Returns
        -------
        LoggingSettings
            LoggingSettings object containing the current logging settings.
        """
        return self._logging_settings

    @logging_settings.setter
    def logging_settings(self, value: Tuple[SystemSettings, UserSettings]):
        """Define the Setter for updating the logging settings.

        Parameters
        ----------
        value : Tuple[SystemSettings, UserSettings]
            Tuple containing updated SystemSettings and UserSettings.
        """
        system_settings, user_settings = value
        self._logging_settings = LoggingSettings(
            user_settings=user_settings,
            system_settings=system_settings,
        )

    def _setup_handlers(self) -> HandlersManager:
        """Set up Logging Handlers.

        Returns
        -------
        HandlersManager
            Handlers Manager object.
        """
        handlers_manager = HandlersManager(
            self._logger, settings=self._logging_settings
        )
        handlers_manager.setup()

        self._logger.info("Logging configuration finished")
        self._logger.info("Logging set to %s", self._logging_settings.handler_list)
        self._logger.info("Verbosity set to %s", self._logging_settings.verbosity)
        self._logger.info(
            "LOGFORMAT: %s%s",
            FormatterWithExceptions.LOGPREFIXFORMAT.replace("|", "-"),
            FormatterWithExceptions.LOGFORMAT.replace("|", "-"),
        )

        return handlers_manager

    def _log_startup(
        self,
        route: Optional[str] = None,
        custom_headers: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Log startup information.

        Parameters
        ----------
        route : Optional[str]
            Route that triggered the startup log (e.g. a login route).
        custom_headers : Optional[Dict[str, Any]]
            Custom headers forwarded with the request, if any.
        """

        def check_credentials_defined(credentials: Dict[str, Any]):
            # Only log WHETHER a credential is set, never its value.
            class CredentialsDefinition(Enum):
                defined = "defined"
                undefined = "undefined"

            return {
                c: (
                    CredentialsDefinition.defined.value
                    if credentials[c]
                    else CredentialsDefinition.undefined.value
                )
                for c in credentials
            }

        self._logger.info(
            "STARTUP: %s ",
            json.dumps(
                {
                    "route": route,
                    "PREFERENCES": self._user_settings.preferences,
                    "KEYS": check_credentials_defined(
                        self._user_settings.credentials.model_dump()
                        if self._user_settings.credentials
                        else {}
                    ),
                    "SYSTEM": self._system_settings,
                    "custom_headers": custom_headers,
                },
                default=to_jsonable_python,
            ),
        )

    # pylint: disable=R0917
    def log(
        self,
        user_settings: UserSettings,
        system_settings: SystemSettings,
        route: str,
        func: Callable,
        kwargs: Dict[str, Any],
        exec_info: Union[
            Tuple[Type[BaseException], BaseException, TracebackType],
            Tuple[None, None, None],
        ],
        custom_headers: Optional[Dict[str, Any]] = None,
    ):
        """Log command output and relevant information.

        Parameters
        ----------
        user_settings : UserSettings
            User Settings object.
        system_settings : SystemSettings
            System Settings object.
        route : str
            Route for the command.
        func : Callable
            Callable representing the executed function.
        kwargs : Dict[str, Any]
            Keyword arguments passed to the function.
        exec_info : Union[
            Tuple[Type[BaseException], BaseException, TracebackType],
            Tuple[None, None, None],
        ]
            Exception information; the all-None tuple means success.
        custom_headers : Optional[Dict[str, Any]]
            Custom headers forwarded with the request, if any.
        """
        # Refresh settings on every call: they may have changed since init.
        self._user_settings = user_settings
        self._system_settings = system_settings
        self._logging_settings = LoggingSettings(
            user_settings=self._user_settings,
            system_settings=self._system_settings,
        )
        self._handlers_manager.update_handlers(self._logging_settings)

        if not self._logging_settings.logging_suppress:

            # Login routes get a STARTUP record instead of a CMD/ERROR record.
            if "login" in route:
                self._log_startup(route, custom_headers)
            else:

                # Remove CommandContext if any
                kwargs.pop("cc", None)

                # Get provider for posthog logs
                passed_model = kwargs.get("provider_choices", DummyProvider())
                provider = (
                    passed_model.provider
                    if hasattr(passed_model, "provider")
                    else "not_passed_to_kwargs"
                )

                # Truncate kwargs if too long
                kwargs = {k: str(v)[:300] for k, v in kwargs.items()}
                # Get execution info
                error = None if all(i is None for i in exec_info) else str(exec_info[1])

                # Construct message
                message_label = "ERROR" if error else "CMD"
                log_message = json.dumps(
                    {
                        "route": route,
                        "input": kwargs,
                        "error": error,
                        "provider": provider,
                        "custom_headers": custom_headers,
                    },
                    default=to_jsonable_python,
                )
                log_message = f"{message_label}: {log_message}"
                log_level = self._logger.error if error else self._logger.info
                log_level(
                    log_message,
                    extra={"func_name_override": func.__name__},
                    exc_info=exec_info,
                )
import List, Optional + +from openbb_core.app.logs.utils.utils import get_app_id, get_log_dir, get_session_id +from openbb_core.app.model.system_settings import SystemSettings +from openbb_core.app.model.user_settings import UserSettings + + +# pylint: disable=too-many-instance-attributes +class LoggingSettings: + """Logging settings.""" + + def __init__( + self, + user_settings: Optional[UserSettings] = None, + system_settings: Optional[SystemSettings] = None, + ): + """Initialize the logging settings.""" + user_settings = user_settings if user_settings is not None else UserSettings() + system_settings = ( + system_settings if system_settings is not None else SystemSettings() + ) + user_data_directory = ( + str(Path.home() / "OpenBBUserData") + if not user_settings.preferences + else user_settings.preferences.data_directory + ) + hub_session = ( + user_settings.profile.hub_session if user_settings.profile else None + ) + if hub_session: + user_id = hub_session.user_uuid + user_email = hub_session.email + user_primary_usage = hub_session.primary_usage + else: + user_id, user_email, user_primary_usage = None, None, None + + # System + self.app_name: str = system_settings.logging_app_name + self.sub_app_name: str = system_settings.logging_sub_app + self.app_id: str = get_app_id(user_data_directory) + self.session_id: str = get_session_id() + self.frequency: str = system_settings.logging_frequency + self.handler_list: List[str] = system_settings.logging_handlers + self.rolling_clock: bool = system_settings.logging_rolling_clock + self.verbosity: int = system_settings.logging_verbosity + self.platform: str = system_settings.platform + self.python_version: str = system_settings.python_version + self.platform_version: str = system_settings.version + self.logging_suppress: bool = system_settings.logging_suppress + self.log_collect: bool = system_settings.log_collect + # User + self.user_id: Optional[str] = user_id + self.user_logs_directory: Path = 
get_log_dir(user_data_directory) + self.user_email: Optional[str] = user_email + self.user_primary_usage: Optional[str] = user_primary_usage diff --git a/openbb_platform/core/openbb_core/app/logs/utils/expired_files.py b/openbb_platform/core/openbb_core/app/logs/utils/expired_files.py new file mode 100644 index 0000000000000000000000000000000000000000..feb933c9a69b2118b136365854edc7fc5674a782 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/logs/utils/expired_files.py @@ -0,0 +1,30 @@ +"""Expired files management utilities.""" + +import contextlib +from datetime import datetime +from pathlib import Path +from typing import List + + +def get_timestamp_from_x_days(x: int) -> float: + """Get the timestamp from x days ago.""" + timestamp_from_x_days = datetime.now().timestamp() - x * 86400 + return timestamp_from_x_days + + +def get_expired_file_list(directory: Path, before_timestamp: float) -> List[Path]: + """Get the list of expired files from a directory.""" + expired_files = [] + if directory.is_dir(): # Check if the directory exists and is a directory + for file in directory.iterdir(): + if file.is_file() and file.lstat().st_mtime < before_timestamp: + expired_files.append(file) + + return expired_files + + +def remove_file_list(file_list: List[Path]): + """Remove a list of files.""" + for file in file_list: + with contextlib.suppress(PermissionError): + file.unlink(missing_ok=True) diff --git a/openbb_platform/core/openbb_core/app/logs/utils/utils.py b/openbb_platform/core/openbb_core/app/logs/utils/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..e150627b147312526f9b8ca8a0d5e33656415331 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/logs/utils/utils.py @@ -0,0 +1,74 @@ +"""Utility functions for logging.""" + +import time +import uuid +import warnings +from pathlib import Path, PosixPath + + +def get_session_id() -> str: + """UUID of the current session.""" + session_id = str(uuid.uuid4()) + "-" + 
str(int(time.time())) + return session_id + + +def get_app_id(contextual_user_data_directory: str) -> str: + """Get UUID of the current installation.""" + try: + app_id = get_log_dir(contextual_user_data_directory).stem + except OSError as e: + if e.errno == 30: + warnings.warn("Please move the application into a writable location.") + warnings.warn( + "Note for macOS users: copy `OpenBB Terminal` folder outside the DMG." + ) + raise e + except Exception as e: + raise e + + return app_id + + +def get_log_dir(contextual_user_data_directory: str) -> PosixPath: + """Retrieve application's log directory.""" + log_dir = create_log_dir_if_not_exists(contextual_user_data_directory) + logging_uuid = create_log_uuid_if_not_exists(log_dir) + uuid_log_dir = create_uuid_dir_if_not_exists(log_dir, logging_uuid) + + return uuid_log_dir + + +def create_log_dir_if_not_exists(contextual_user_data_directory: str) -> Path: + """Create a log directory for the current installation.""" + log_dir = Path(contextual_user_data_directory).joinpath("logs").absolute() + if not log_dir.is_dir(): + log_dir.mkdir(parents=True, exist_ok=True) + + return log_dir + + +def create_log_uuid_if_not_exists(log_dir: Path) -> str: + """Create a log id file for the current logging session.""" + log_id = get_log_id(log_dir) + if not log_id.is_file(): + logging_id = f"{uuid.uuid4()}" + log_id.write_text(logging_id, encoding="utf-8") + else: + logging_id = log_id.read_text(encoding="utf-8").rstrip() + + return logging_id + + +def get_log_id(log_dir): + """Get the log id file.""" + return (log_dir / ".logid").absolute() + + +def create_uuid_dir_if_not_exists(log_dir, logging_id) -> PosixPath: + """Create a directory for the current logging session.""" + uuid_log_dir = (log_dir / logging_id).absolute() + + if not uuid_log_dir.is_dir(): + uuid_log_dir.mkdir(parents=True, exist_ok=True) + + return uuid_log_dir diff --git a/openbb_platform/core/openbb_core/app/model/__init__.py 
b/openbb_platform/core/openbb_core/app/model/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..44adb9707fe7ae57c6342a16b8b53a0ea77e2fd8 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/model/__init__.py @@ -0,0 +1 @@ +"""OpenBB Core App Model.""" diff --git a/openbb_platform/core/openbb_core/app/model/abstract/__init__.py b/openbb_platform/core/openbb_core/app/model/abstract/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..65daa6e565064106e9c5ac9e921a735aaea4f114 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/model/abstract/__init__.py @@ -0,0 +1 @@ +"""OpenBB Core App Abstract Model.""" diff --git a/openbb_platform/core/openbb_core/app/model/abstract/error.py b/openbb_platform/core/openbb_core/app/model/abstract/error.py new file mode 100644 index 0000000000000000000000000000000000000000..91c67d9aa2421d9f5d2f9190cc2e2838e236bc71 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/model/abstract/error.py @@ -0,0 +1,12 @@ +"""OpenBB Error.""" + +from typing import Optional, Union + + +class OpenBBError(Exception): + """OpenBB Error.""" + + def __init__(self, original: Optional[Union[str, Exception]] = None): + """Initialize the OpenBBError.""" + self.original = original + super().__init__(str(original)) diff --git a/openbb_platform/core/openbb_core/app/model/abstract/results.py b/openbb_platform/core/openbb_core/app/model/abstract/results.py new file mode 100644 index 0000000000000000000000000000000000000000..a9456b991898cbb7437c16d0e0e3cc59cd909762 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/model/abstract/results.py @@ -0,0 +1,5 @@ +"""OpenBB Core App Model Abstract Results.""" + +from pydantic import BaseModel + +Results = BaseModel diff --git a/openbb_platform/core/openbb_core/app/model/abstract/singleton.py b/openbb_platform/core/openbb_core/app/model/abstract/singleton.py new file mode 100644 index 
0000000000000000000000000000000000000000..0ecccafc446a26c4e994140766162e6ad8377b67 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/model/abstract/singleton.py @@ -0,0 +1,20 @@ +"""Singleton metaclass implementation.""" + +from typing import Dict, Generic, TypeVar + +T = TypeVar("T") + + +class SingletonMeta(type, Generic[T]): + """Singleton metaclass.""" + + # TODO : check if we want to update this to be thread safe + _instances: Dict[T, T] = {} + + def __call__(cls: "SingletonMeta", *args, **kwargs): + """Singleton pattern implementation.""" + if cls not in cls._instances: + instance = super().__call__(*args, **kwargs) + cls._instances[cls] = instance + + return cls._instances[cls] diff --git a/openbb_platform/core/openbb_core/app/model/abstract/tagged.py b/openbb_platform/core/openbb_core/app/model/abstract/tagged.py new file mode 100644 index 0000000000000000000000000000000000000000..80cfe1678e0abd2075cb39faada52c6b7e25f391 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/model/abstract/tagged.py @@ -0,0 +1,10 @@ +"""OpenBB Core App Abstract Model Tagged.""" + +from pydantic import BaseModel, Field +from uuid_extensions import uuid7str # type: ignore + + +class Tagged(BaseModel): + """Model for Tagged.""" + + id: str = Field(default_factory=uuid7str, alias="_id") diff --git a/openbb_platform/core/openbb_core/app/model/abstract/warning.py b/openbb_platform/core/openbb_core/app/model/abstract/warning.py new file mode 100644 index 0000000000000000000000000000000000000000..7914b6af7f7aa208e0dbf8403be1c175f29faa7e --- /dev/null +++ b/openbb_platform/core/openbb_core/app/model/abstract/warning.py @@ -0,0 +1,24 @@ +"""Module for warnings.""" + +from warnings import WarningMessage + +from pydantic import BaseModel + + +class Warning_(BaseModel): + """Model for Warning.""" + + category: str + message: str + + +def cast_warning(w: WarningMessage) -> Warning_: + """Cast a warning to a pydantic model.""" + return Warning_( + category=w.category.__name__, + 
"""FastAPI configuration settings model."""

from typing import Dict, List, Optional

from pydantic import BaseModel, ConfigDict, Field, computed_field


class Cors(BaseModel):
    """CORS configuration applied to the FastAPI application (immutable)."""

    model_config = ConfigDict(frozen=True)

    allow_origins: List[str] = Field(default_factory=lambda: ["*"])
    allow_methods: List[str] = Field(default_factory=lambda: ["*"])
    allow_headers: List[str] = Field(default_factory=lambda: ["*"])


class Servers(BaseModel):
    """Single server entry advertised in the OpenAPI schema (immutable)."""

    model_config = ConfigDict(frozen=True)

    url: str = ""
    description: str = "Local OpenBB development server"


class APISettings(BaseModel):
    """Top-level settings used to configure the FastAPI application (immutable)."""

    model_config = ConfigDict(frozen=True)

    version: str = "1"
    title: str = "OpenBB Platform API"
    description: str = "Investment research for everyone, anywhere."
    terms_of_service: str = "http://example.com/terms/"
    contact_name: str = "OpenBB Team"
    contact_url: str = "https://openbb.co"
    contact_email: str = "hello@openbb.co"
    license_name: str = "AGPLv3"
    license_url: str = "https://github.com/OpenBB-finance/OpenBB/blob/develop/LICENSE"
    servers: List[Servers] = Field(default_factory=lambda: [Servers()])
    cors: Cors = Field(default_factory=Cors)
    custom_headers: Optional[Dict[str, str]] = Field(
        default=None, description="Custom headers and respective default value."
    )

    @computed_field  # type: ignore[misc]
    @property
    def prefix(self) -> str:
        """API route prefix derived from the configured version."""
        return f"/api/v{self.version}"

    def __repr__(self) -> str:
        """Render the class name followed by one `key: value` line per field."""
        field_lines = [
            f"{key}: {value}" for key, value in self.model_dump().items()
        ]
        return f"{self.__class__.__name__}\n\n" + "\n".join(field_lines)
"""Charting settings."""

import importlib
from pathlib import Path
from typing import TYPE_CHECKING, Optional

from openbb_core.app.logs.utils.utils import get_app_id
from openbb_core.env import Env

if TYPE_CHECKING:
    from openbb_core.app.model.system_settings import SystemSettings
    from openbb_core.app.model.user_settings import UserSettings


# pylint: disable=too-many-instance-attributes
class ChartingSettings:
    """Flat snapshot of the user/system settings needed by charting extensions.

    The settings models are imported at call time (via importlib) rather than
    at module import time to avoid a circular import with the settings models.
    """

    def __init__(
        self,
        user_settings: Optional["UserSettings"] = None,
        system_settings: Optional["SystemSettings"] = None,
    ):
        """Initialize charting settings.

        Parameters
        ----------
        user_settings : Optional[UserSettings]
            User settings; a default instance is created when omitted.
        system_settings : Optional[SystemSettings]
            System settings; a default instance is created when omitted.
        """
        # Fix: `import_module`'s second parameter is `package`, which is only
        # used to resolve *relative* module names. The previous calls passed
        # class names ("UserSettings"/"SystemSettings") there, which was
        # silently ignored; it has been removed.
        user_settings_module = importlib.import_module(
            "openbb_core.app.model.user_settings"
        )
        system_settings_module = importlib.import_module(
            "openbb_core.app.model.system_settings"
        )

        UserSettings = user_settings_module.UserSettings
        SystemSettings = system_settings_module.SystemSettings
        user_settings = user_settings or UserSettings()
        system_settings = system_settings or SystemSettings()

        # Fall back to ~/OpenBBUserData when no preferences are set.
        user_data_directory = (
            str(Path.home() / "OpenBBUserData")
            if not user_settings.preferences
            else user_settings.preferences.data_directory
        )

        # System
        self.log_collect: bool = system_settings.log_collect
        self.version: str = system_settings.version
        self.python_version: str = system_settings.python_version
        self.test_mode = system_settings.test_mode
        self.app_id: str = get_app_id(user_data_directory)
        self.debug_mode: bool = system_settings.debug_mode or Env().DEBUG_MODE
        self.headless: bool = system_settings.headless
        # User (hub_session may be None, hence getattr with a default)
        self.user_email: Optional[str] = getattr(
            user_settings.profile.hub_session, "email", None
        )
        self.user_uuid: Optional[str] = getattr(
            user_settings.profile.hub_session, "user_uuid", None
        )
        self.user_exports_directory = user_settings.preferences.export_directory
        self.user_styles_directory = user_settings.preferences.user_styles_directory
        # Theme
        self.chart_style: str = user_settings.preferences.chart_style
        self.table_style = user_settings.preferences.table_style
class CredentialsLoader:
    """Builds the `Credentials` pydantic model from provider/extension requirements."""

    # Maps origin (provider or extension name) -> list of credential field names.
    credentials: Dict[str, List[str]] = {}

    def format_credentials(self, additional: dict) -> Dict[str, Tuple[object, object]]:
        """Prepare the credentials map consumed by `pydantic.create_model`.

        Each entry maps a credential name to a `(type, FieldInfo)` tuple (the
        previous `Tuple[object, None]` annotation was incorrect — the second
        element is a `Field(...)`, not None). `additional` holds credentials
        found in `user_settings.json` that no provider/extension declared;
        they are kept so users do not lose them.
        """
        formatted: Dict[str, Tuple[object, object]] = {}
        for c_origin, c_list in self.credentials.items():
            for c_name in c_list:
                if c_name in formatted:
                    warnings.warn(
                        message=f"Skipping '{c_name}', credential already in use.",
                        category=OpenBBWarning,
                    )
                    continue
                formatted[c_name] = (
                    Optional[OBBSecretStr],
                    Field(default=None, description=c_origin, alias=c_name.upper()),
                )

        if additional:
            for key, value in additional.items():
                # Declared credentials take precedence over user extras.
                if key in formatted:
                    continue
                formatted[key] = (
                    Optional[OBBSecretStr],
                    Field(default=value, description=key, alias=key.upper()),
                )

        return dict(sorted(formatted.items()))

    def from_obbject(self) -> None:
        """Load credentials declared by OBBject extensions."""
        for ext_name, ext in ExtensionLoader().obbject_objects.items():  # type: ignore[attr-defined]
            try:
                if ext_name in self.credentials:
                    # Fix: message previously read "name already in user."
                    warnings.warn(
                        message=f"Skipping '{ext_name}', name already in use.",
                        category=OpenBBWarning,
                    )
                    continue
                self.credentials[ext_name] = ext.credentials
            except Exception as e:  # pylint: disable=broad-except
                msg = f"Error loading extension: {ext_name}\n"
                if Env().DEBUG_MODE:
                    traceback.print_exception(type(e), e, e.__traceback__)
                    raise LoadingError(msg + f"\033[91m{e}\033[0m") from e
                # Best-effort outside debug mode: warn and keep loading others.
                warnings.warn(
                    message=msg,
                    category=OpenBBWarning,
                )

    def from_providers(self) -> None:
        """Load credentials declared by providers."""
        self.credentials = ProviderInterface().credentials

    def load(self) -> BaseModel:
        """Create and return the `Credentials` model class.

        Providers are loaded first so they get priority when choosing
        credential names; extensions come next and user extras last.
        """
        self.from_providers()
        self.from_obbject()
        path = Path(USER_SETTINGS_PATH)
        additional: dict = {}

        if path.exists():
            with open(USER_SETTINGS_PATH, encoding="utf-8") as f:
                data = json.load(f)
                if "credentials" in data:
                    additional = data["credentials"]

        model = create_model(
            "Credentials",
            __config__=ConfigDict(validate_assignment=True, populate_by_name=True),
            **self.format_credentials(additional),  # type: ignore
        )
        # Expose the origin map for introspection by callers.
        model.origins = self.credentials
        return model
class Defaults(BaseModel):
    """Per-command default parameter values loaded from `user_settings.json`."""

    model_config = ConfigDict(validate_assignment=True, populate_by_name=True)

    commands: dict[str, dict[str, Any]] = Field(
        default_factory=dict,
        alias="routes",
    )

    def __repr__(self) -> str:
        """Render the class name followed by one `key: value` line per field."""
        body = "\n".join(f"{k}: {v}" for k, v in self.model_dump().items())
        return f"{self.__class__.__name__}\n\n{body}"

    @model_validator(mode="before")
    @classmethod
    def validate_before(cls, values: dict) -> dict:
        """Normalize incoming settings before validation.

        Accepts the legacy 'routes' key, strips/normalizes command paths
        ("/a/b" -> "a.b") and wraps single provider strings in a list.
        """
        source_key = "commands"
        if "routes" in values:
            if not values.get("routes"):
                del values["routes"]
            # NOTE(review): this warns only when `show_warnings` is False —
            # looks inverted relative to the preference's name; confirm intent.
            show_warnings = values.get("preferences", {}).get("show_warnings")
            if show_warnings is False or show_warnings in ["False", "false"]:
                warn(
                    message=(
                        "The 'routes' key is deprecated within 'defaults' of "
                        "'user_settings.json'. Suppress this warning by updating "
                        "the key to 'commands'."
                    ),
                    category=OpenBBWarning,
                )
            source_key = "routes"

        normalized: dict = {"commands": {}}
        for raw_key, params in values.get(source_key, {}).items():
            command = raw_key.strip("/").replace("/", ".")
            provider = params.get("provider") if params else None
            if isinstance(provider, str):
                params["provider"] = [provider]
            normalized["commands"][command] = params

        return normalized

    def update(self, incoming: "Defaults"):
        """Merge another instance's commands into this one, in place."""
        merged = incoming.model_dump(exclude_none=True).get("commands", {})
        self.__dict__["commands"].update(merged)
self.parameters.get("provider") # type: ignore + + @model_validator(mode="before") + @classmethod + def validate_model(cls, values: dict) -> dict: + """Validate model.""" + parameters = values.get("parameters", {}) + if "provider" not in parameters and "data" not in parameters: + raise ValueError("API example must specify a provider.") + + provider = parameters.get("provider") + if provider and not isinstance(provider, str): + raise ValueError("Provider must be a string.") + + return values + + @staticmethod + def _unpack_type(type_: type) -> set: + """Unpack types from types, example Union[List[str], int] -> {typing._GenericAlias, int}.""" + if ( + hasattr(type_, "__args__") + and type(type_) # pylint: disable=unidiomatic-typecheck + is not _GenericAlias + ): + return set().union(*map(APIEx._unpack_type, type_.__args__)) # type: ignore + return {type_} if isinstance(type_, type) else {type(type_)} + + @staticmethod + def _shift(i: int) -> float: + """Return a transformation of the integer.""" + return 2 * (i + 1) / (2 * i) % 1 + 1 + + @staticmethod + def mock_data( + dataset: Literal["timeseries", "panel"], + size: int = 5, + sample: Optional[Dict[str, Any]] = None, + multiindex: Optional[Dict[str, Any]] = None, + ) -> List[Dict]: + """Generate mock data from a sample. + + Parameters + ---------- + dataset : str + The type of data to return: + - 'timeseries': Time series data + - 'panel': Panel data (multiindex) + + size : int + The size of the data to return, default is 5. + sample : Optional[Dict[str, Any]], optional + A sample of the data to return, by default None. + multiindex_names : Optional[List[str]], optional + The names of the multiindex, by default None. 
+ + Timeseries default sample: + { + "date": "2023-01-01", + "open": 110.0, + "high": 120.0, + "low": 100.0, + "close": 115.0, + "volume": 10000, + } + + Panel default sample: + { + "portfolio_value": 100000, + "risk_free_rate": 0.02, + } + multiindex: {"asset_manager": "AM", "time": 0} + + Returns + ------- + List[Dict] + A list of dictionaries with the mock data. + """ + if dataset == "timeseries": + sample = sample or { + "date": "2023-01-01", + "open": 110.0, + "high": 120.0, + "low": 100.0, + "close": 115.0, + "volume": 10000, + } + result = [] + for i in range(1, size + 1): + s = APIEx._shift(i) + obs = {} + for k, v in sample.items(): + if k == "date": + obs[k] = ( + datetime.strptime(v, "%Y-%m-%d") + timedelta(days=i) + ).strftime("%Y-%m-%d") + else: + obs[k] = round(v * s, 2) + result.append(obs) + return result + if dataset == "panel": + sample = sample or { + "portfolio_value": 100000.0, + "risk_free_rate": 0.02, + } + multiindex = multiindex or {"asset_manager": "AM", "time": 0} + multiindex_names = list(multiindex.keys()) + idx_1 = multiindex_names[0] + idx_2 = multiindex_names[1] + items_per_idx = 2 + item: Dict[str, Any] = { + "is_multiindex": True, + "multiindex_names": str(multiindex_names), + } + # Iterate over the number of items to create and add them to the result + result = [] + for i in range(1, size + 1): + item[idx_1] = f"{idx_1}_{i}" + for j in range(items_per_idx): + item[idx_2] = j + for k, v in sample.items(): + if isinstance(v, str): + item[k] = f"{v}_{j}" + else: + item[k] = round(v * APIEx._shift(i + j), 2) + result.append(item.copy()) + return result + raise ValueError(f"Dataset '{dataset}' not found.") + + def to_python(self, **kwargs) -> str: + """Return a Python code representation of the example.""" + indentation = kwargs.get("indentation", "") + func_path = kwargs.get("func_path", ".func_router.func_name") + param_types: Dict[str, type] = kwargs.get("param_types", {}) + prompt = kwargs.get("prompt", "") + + eg = "" + if 
class Extension:
    """
    Serves as OBBject extension entry point and must be created by each extension package.

    See https://docs.openbb.co/platform/development/developer-guidelines/obbject_extensions.
    """

    def __init__(
        self,
        name: str,
        credentials: Optional[List[str]] = None,
        description: Optional[str] = None,
    ) -> None:
        """Store the extension's name, required credentials, and description.

        Parameters
        ----------
        name : str
            Name of the extension.
        credentials : Optional[List[str]], optional
            List of required credentials, by default None
        description: Optional[str]
            Extension description.
        """
        self.name = name
        self.credentials = [] if credentials is None else credentials
        self.description = description

    @property
    def obbject_accessor(self) -> Callable:
        """Register this extension as an accessor on OBBject (pandas-style)."""
        # Imported lazily to dodge a circular import with OBBject.
        # pylint: disable=import-outside-toplevel
        from openbb_core.app.model.obbject import OBBject

        return self.register_accessor(self.name, OBBject)

    @staticmethod
    def register_accessor(name, cls) -> Callable:
        """Return a decorator that attaches an accessor to `cls` under `name`."""

        def decorator(accessor):
            if hasattr(cls, name):
                # Warn instead of failing when shadowing an existing attribute.
                warnings.warn(
                    f"registration of accessor '{accessor!r}' under name "
                    f"'{name!r}' for type '{cls!r}' is overriding a preexisting "
                    f"attribute with the same name.",
                    UserWarning,
                )
            setattr(cls, name, CachedAccessor(name, accessor))
            # pylint: disable=protected-access
            cls.accessors.add(name)
            return accessor

        return decorator
class HubSession(BaseModel):
    """Represents an authenticated OpenBB Hub session."""

    username: Optional[str] = None
    email: str
    primary_usage: str
    user_uuid: str
    token_type: str
    access_token: SecretStr

    def __repr__(self) -> str:
        """Render the class name followed by one `key: value` line per field."""
        body = "\n".join(
            f"{name}: {value}" for name, value in self.model_dump().items()
        )
        return f"{self.__class__.__name__}\n\n{body}"

    @field_serializer("access_token", when_used="json-unless-none")
    def _dump_secret(self, v):
        """Reveal the token when serializing to JSON (it stays masked otherwise)."""
        return v.get_secret_value()
class HubUserSettings(BaseModel):
    """Hub user settings model."""

    model_config = ConfigDict(validate_assignment=True)

    features_settings: Dict[str, Any] = Field(default_factory=dict)
    features_keys: Dict[str, Optional[str]] = Field(default_factory=dict)

    @field_validator("features_keys", mode="before", check_fields=False)
    @classmethod
    def to_lower(cls, d: dict) -> dict:
        """Normalize credential names to lowercase before validation."""
        return {key.lower(): value for key, value in d.items()}
+ ) + route: str = Field(description="Route of the command.") + timestamp: datetime = Field(description="Execution starting timestamp.") + + def __repr__(self) -> str: + """Return string representation.""" + return f"{self.__class__.__name__}\n\n" + "\n".join( + f"{k}: {v}" for k, v in self.model_dump().items() + ) + + @field_validator("arguments") + @classmethod + def scale_arguments(cls, v): + """Scale arguments. + + This function is meant to limit the size of the input arguments of a command. + If the type is one of the following: `Data`, `List[Data]`, `DataFrame`, `List[DataFrame]`, + `Series`, `List[Series]` or `ndarray`, the value of the argument is swapped by a dictionary + containing the type and the columns. If the type is not one of the previous, the + value is kept or trimmed to 80 characters. + """ + # pylint: disable=import-outside-toplevel + from inspect import isclass # noqa + from numpy import ndarray # noqa + from pandas import DataFrame, Series # noqa + + arguments: Dict[str, Any] = {} + for item in ["provider_choices", "standard_params", "extra_params"]: + arguments[item] = {} + # The item could be class or it could a dictionary. + v_item = ( + v.__dict__.get(item, {}) if not isinstance(v, dict) else v.get(item, {}) + ) + # The item might not be a dictionary yet. 
+ v_item = v_item if isinstance(v_item, dict) else v_item.__dict__ + for arg, arg_val in v_item.items(): + new_arg_val: Optional[Union[str, dict[str, Sequence[Any]]]] = None + + # Data + if isclass(type(arg_val)) and issubclass(type(arg_val), Data): + new_arg_val = { + "type": f"{type(arg_val).__name__}", + "columns": list(arg_val.model_dump().keys()), + } + + # List[Data] + if isinstance(arg_val, list) and issubclass(type(arg_val[0]), Data): + _columns = [list(d.model_dump().keys()) for d in arg_val] + ld_columns = ( + item for sublist in _columns for item in sublist + ) # flatten + new_arg_val = { + "type": f"List[{type(arg_val[0]).__name__}]", + "columns": list(set(ld_columns)), + } + + # DataFrame + elif isinstance(arg_val, DataFrame): + df_columns = ( + list(arg_val.index.names) + arg_val.columns.tolist() + if any(index is not None for index in list(arg_val.index.names)) + else arg_val.columns.tolist() + ) + new_arg_val = { + "type": f"{type(arg_val).__name__}", + "columns": df_columns, + } + + # List[DataFrame] + elif isinstance(arg_val, list) and issubclass( + type(arg_val[0]), DataFrame + ): + ldf_columns = [ + ( + list(df.index.names) + df.columns.tolist() + if any(index is not None for index in list(df.index.names)) + else df.columns.tolist() + ) + for df in arg_val + ] + new_arg_val = { + "type": f"List[{type(arg_val[0]).__name__}]", + "columns": ldf_columns, + } + + # Series + elif isinstance(arg_val, Series): + new_arg_val = { + "type": f"{type(arg_val).__name__}", + "columns": list(arg_val.index.names) + [arg_val.name], + } + + # List[Series] + elif isinstance(arg_val, list) and isinstance(arg_val[0], Series): + ls_columns = [ + ( + list(series.index.names) + [series.name] + if any( + index is not None for index in list(series.index.names) + ) + else series.name + ) + for series in arg_val + ] + new_arg_val = { + "type": f"List[{type(arg_val[0]).__name__}]", + "columns": ls_columns, + } + + # ndarray + elif isinstance(arg_val, ndarray): + new_arg_val 
= { + "type": f"{type(arg_val).__name__}", + "columns": list(arg_val.dtype.names or []), + } + + else: + str_repr_arg_val = str(arg_val) + if len(str_repr_arg_val) > 80: + new_arg_val = str_repr_arg_val[:80] + + arguments[item][arg] = new_arg_val or arg_val + + return arguments diff --git a/openbb_platform/core/openbb_core/app/model/obbject.py b/openbb_platform/core/openbb_core/app/model/obbject.py new file mode 100644 index 0000000000000000000000000000000000000000..b38eba914ab65eb301d302f6e915d9337837c4a2 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/model/obbject.py @@ -0,0 +1,387 @@ +"""The OBBject.""" + +# pylint: disable=too-many-branches, too-many-locals, too-many-statements + +from collections.abc import Hashable +from typing import ( + TYPE_CHECKING, + Any, + Callable, + ClassVar, + Dict, + Generic, + List, + Literal, + Optional, + Set, + TypeVar, + Union, +) + +from openbb_core.app.model.abstract.error import OpenBBError +from openbb_core.app.model.abstract.tagged import Tagged +from openbb_core.app.model.abstract.warning import Warning_ +from openbb_core.app.model.charts.chart import Chart +from openbb_core.provider.abstract.annotated_result import AnnotatedResult +from openbb_core.provider.abstract.data import Data +from pydantic import BaseModel, Field, PrivateAttr + +if TYPE_CHECKING: + from numpy import ndarray # noqa + from pandas import DataFrame # noqa + from openbb_core.app.query import Query # noqa + + try: + from polars import DataFrame as PolarsDataFrame # type: ignore + except ImportError: + PolarsDataFrame = None + +T = TypeVar("T") + + +class OBBject(Tagged, Generic[T]): + """OpenBB object.""" + + accessors: ClassVar[Set[str]] = set() + _user_settings: ClassVar[Optional[BaseModel]] = None + _system_settings: ClassVar[Optional[BaseModel]] = None + + results: Optional[T] = Field( + default=None, + description="Serializable results.", + ) + provider: Optional[str] = Field( # type: ignore + default=None, + description="Provider 
name.", + ) + warnings: Optional[List[Warning_]] = Field( + default=None, + description="List of warnings.", + ) + chart: Optional[Chart] = Field( + default=None, + description="Chart object.", + ) + extra: Dict[str, Any] = Field( + default_factory=dict, + description="Extra info.", + ) + _route: str = PrivateAttr( + default=None, + ) + _standard_params: Optional[Dict[str, Any]] = PrivateAttr( + default_factory=dict, + ) + _extra_params: Optional[Dict[str, Any]] = PrivateAttr( + default_factory=dict, + ) + + def __repr__(self) -> str: + """Human readable representation of the object.""" + items = [ + f"{k}: {v}"[:83] + ("..." if len(f"{k}: {v}") > 83 else "") + for k, v in self.model_dump().items() + ] + return f"{self.__class__.__name__}\n\n" + "\n".join(items) + + def to_df( + self, + index: Optional[Union[str, None]] = "date", + sort_by: Optional[str] = None, + ascending: Optional[bool] = None, + ) -> "DataFrame": + """Alias for `to_dataframe`. + + Supports converting creating Pandas DataFrames from the following + serializable data formats: + + - List[BaseModel] + - List[Dict] + - List[List] + - List[str] + - List[int] + - List[float] + - Dict[str, Dict] + - Dict[str, List] + - Dict[str, BaseModel] + + Other supported formats: + - str + + Parameters + ---------- + index : Optional[str] + Column name to use as index. + sort_by : Optional[str] + Column name to sort by. + ascending: Optional[bool] + Sort by ascending for each column specified in `sort_by`. + + Returns + ------- + DataFrame + Pandas DataFrame. + """ + return self.to_dataframe(index=index, sort_by=sort_by, ascending=ascending) + + def to_dataframe( # noqa: PLR0912 + self, + index: Optional[Union[str, None]] = "date", + sort_by: Optional[str] = None, + ascending: Optional[bool] = None, + ) -> "DataFrame": + """Convert results field to Pandas DataFrame. 
+ + Supports converting creating Pandas DataFrames from the following + serializable data formats: + + - List[BaseModel] + - List[Dict] + - List[List] + - List[str] + - List[int] + - List[float] + - Dict[str, Dict] + - Dict[str, List] + - Dict[str, BaseModel] + + Other supported formats: + - str + + Parameters + ---------- + index : Optional[str] + Column name to use as index. + sort_by : Optional[str] + Column name to sort by. + ascending: Optional[bool] + Sort by ascending for each column specified in `sort_by`. + + Returns + ------- + DataFrame + Pandas DataFrame. + """ + # pylint: disable=import-outside-toplevel + from pandas import DataFrame, Series, concat # noqa + from openbb_core.app.utils import basemodel_to_df # noqa + + def is_list_of_basemodel(items: Union[List[T], T]) -> bool: + return isinstance(items, list) and all( + isinstance(item, BaseModel) for item in items + ) + + if self.results is None or not self.results: + raise OpenBBError("Results not found.") + + if isinstance(self.results, DataFrame): + return self.results + + try: + res = self.results + df = None + sort_columns = True + + # BaseModel + if isinstance(res, BaseModel): + res_dict = res.model_dump( # pylint: disable=no-member + exclude_unset=True, exclude_none=True + ) + # Model is serialized as a dict[str, list] or list[dict] + if ( + ( + isinstance(res_dict, dict) + and res_dict + and all(isinstance(v, list) for v in res_dict.values()) + ) + or isinstance(res_dict, list) + and all(isinstance(item, dict) for item in res_dict) + ): + df = DataFrame(res_dict) + sort_columns = False + else: + series = Series(res_dict, name=res.__class__.__name__) + df = series.to_frame().reset_index() + sort_columns = False + + # Dict[str, Any] + elif isinstance(res, dict): + try: + df = DataFrame.from_dict(res).T + except ValueError: + try: + df = DataFrame.from_dict(res, orient="index") + except ValueError: + series = Series(res, name="values") + df = series.to_frame().reset_index() + sort_columns = False 
+ + # List[Dict] + elif isinstance(res, list) and len(res) == 1 and isinstance(res[0], dict): + r = res[0] + dict_of_df = {} + + for k, v in r.items(): + # Dict[str, List[BaseModel]] + if is_list_of_basemodel(v): + dict_of_df[k] = basemodel_to_df(v, index) + sort_columns = False + # Dict[str, Any] + else: + dict_of_df[k] = DataFrame(v) + + df = concat(dict_of_df, axis=1) + + # List[BaseModel] + elif is_list_of_basemodel(res): + dt: Union[List[Data], Data] = res # type: ignore + r = dt[0] if isinstance(dt, list) and len(dt) == 1 else None # type: ignore + if r and all( + prop.get("type") == "array" + for prop in r.model_json_schema()["properties"].values() # type: ignore + ): + sort_columns = False + df = DataFrame(r.model_dump(exclude_unset=True, exclude_none=True)) # type: ignore + else: + df = basemodel_to_df(dt, index) + sort_columns = False + # str + elif isinstance(res, str): + df = DataFrame([res]) + # List[List | str | int | float] | Dict[str, Dict | List | BaseModel] + else: + try: + df = DataFrame(res) # type: ignore[call-overload] + except ValueError: + if isinstance(res, dict): + df = DataFrame([res]) + + if df is None: + raise OpenBBError("Unsupported data format.") + + # Set index, if any + if index is not None and index in df.columns: + df.set_index(index, inplace=True) + + # Drop columns that are all NaN, but don't rearrange columns + if sort_columns: + df.sort_index(axis=1, inplace=True) + df = df.dropna(axis=1, how="all") + + # Sort by specified column + if sort_by: + df.sort_values( + by=sort_by, + ascending=ascending if ascending is not None else True, + inplace=True, + ) + + except OpenBBError as e: + raise e + except ValueError as ve: + raise OpenBBError( + f"ValueError: {ve}. Ensure the data format matches the expected format." + ) from ve + except TypeError as te: + raise OpenBBError( + f"TypeError: {te}. Check the data types in your results." 
+ ) from te + except Exception as ex: + raise OpenBBError(f"An unexpected error occurred: {ex}") from ex + + return df + + def to_polars(self) -> "PolarsDataFrame": # type: ignore + """Convert results field to polars dataframe.""" + try: + from polars import from_pandas # type: ignore # pylint: disable=import-outside-toplevel + except ImportError as exc: + raise ImportError( + "Please install polars: `pip install polars pyarrow` to use this method." + ) from exc + + return from_pandas(self.to_dataframe(index=None)) + + def to_numpy(self) -> "ndarray": + """Convert results field to numpy array.""" + return self.to_dataframe(index=None).to_numpy() + + def to_dict( + self, + orient: Literal[ + "dict", "list", "series", "split", "tight", "records", "index" + ] = "list", + ) -> Union[Dict[Hashable, Any], List[Dict[Hashable, Any]]]: + """Convert results field to a dictionary using any of Pandas `to_dict` options. + + Parameters + ---------- + orient : Literal["dict", "list", "series", "split", "tight", "records", "index"] + Value to pass to `.to_dict()` method + + Returns + ------- + Union[Dict[Hashable, Any], List[Dict[Hashable, Any]]] + Dictionary of lists or list of dictionaries if orient is "records". + """ + df = self.to_dataframe(index=None) + if ( + orient == "list" + and isinstance(self.results, dict) + and all( + isinstance(value, dict) + for value in self.results.values() # pylint: disable=no-member + ) + ): + df = df.T + results = df.to_dict(orient=orient) + if isinstance(results, dict) and orient == "list" and "index" in results: + del results["index"] + return results + + def to_llm(self) -> Union[Dict[Hashable, Any], List[Dict[Hashable, Any]]]: + """Convert results field to an LLM compatible output. + + Returns + ------- + Union[Dict[Hashable, Any], List[Dict[Hashable, Any]]] + Dictionary of lists or list of dictionaries if orient is "records". 
+ """ + df = self.to_dataframe(index=None) + + results = df.to_json( + orient="records", + date_format="iso", + date_unit="s", + ) + + return results # type: ignore + + def show(self, **kwargs: Any) -> None: + """Display chart.""" + # pylint: disable=no-member + if not self.chart or not self.chart.fig: + raise OpenBBError("Chart not found.") + show_function: Callable = getattr(self.chart.fig, "show") + show_function(**kwargs) + + @classmethod + async def from_query(cls, query: "Query") -> "OBBject": + """Create OBBject from query. + + Parameters + ---------- + query : Query + Initialized query object. + + Returns + ------- + OBBject[ResultsType] + OBBject with results. + """ + results = await query.execute() + if isinstance(results, AnnotatedResult): + return cls( + results=results.result, extra={"results_metadata": results.metadata} + ) + return cls(results=results) diff --git a/openbb_platform/core/openbb_core/app/model/preferences.py b/openbb_platform/core/openbb_core/app/model/preferences.py new file mode 100644 index 0000000000000000000000000000000000000000..af53972484f08cfaf034dc96071ab66fb66cdf45 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/model/preferences.py @@ -0,0 +1,35 @@ +"""Preferences for the OpenBB platform.""" + +from pathlib import Path +from typing import Literal + +from pydantic import BaseModel, ConfigDict, Field, PositiveInt + + +class Preferences(BaseModel): + """Preferences for the OpenBB platform.""" + + cache_directory: str = str(Path.home() / "OpenBBUserData" / "cache") + chart_style: Literal["dark", "light"] = "dark" + data_directory: str = str(Path.home() / "OpenBBUserData") + export_directory: str = str(Path.home() / "OpenBBUserData" / "exports") + metadata: bool = True + output_type: Literal[ + "OBBject", "dataframe", "polars", "numpy", "dict", "chart", "llm" + ] = Field( + default="OBBject", + description="Python default output type.", + validate_default=True, + ) + request_timeout: PositiveInt = 60 + show_warnings: bool 
= False + table_style: Literal["dark", "light"] = "dark" + user_styles_directory: str = str(Path.home() / "OpenBBUserData" / "styles" / "user") + + model_config = ConfigDict(validate_assignment=True) + + def __repr__(self) -> str: + """Return a string representation of the model.""" + return f"{self.__class__.__name__}\n\n" + "\n".join( + f"{k}: {v}" for k, v in self.model_dump().items() + ) diff --git a/openbb_platform/core/openbb_core/app/model/profile.py b/openbb_platform/core/openbb_core/app/model/profile.py new file mode 100644 index 0000000000000000000000000000000000000000..cf5d873bfc13243ed9bcfe46b63eecc02bb0f6bf --- /dev/null +++ b/openbb_platform/core/openbb_core/app/model/profile.py @@ -0,0 +1,19 @@ +"""Profile model.""" + +from typing import Optional + +from openbb_core.app.model.hub.hub_session import HubSession +from pydantic import BaseModel, ConfigDict, Field + + +class Profile(BaseModel): + """Profile.""" + + hub_session: Optional[HubSession] = Field(default=None) + model_config = ConfigDict(validate_assignment=True) + + def __repr__(self) -> str: + """Return string representation.""" + return f"{self.__class__.__name__}\n\n" + "\n".join( + f"{k}: {v}" for k, v in self.model_dump().items() + ) diff --git a/openbb_platform/core/openbb_core/app/model/python_settings.py b/openbb_platform/core/openbb_core/app/model/python_settings.py new file mode 100644 index 0000000000000000000000000000000000000000..8eba7cc875c1bda1544564e1433b60c7a5ea4764 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/model/python_settings.py @@ -0,0 +1,74 @@ +"""Python configuration settings model.""" + +from typing import List, Optional + +from pydantic import BaseModel, ConfigDict, Field, PositiveInt + + +class PythonSettings(BaseModel): + """Settings model for Python interface configuration.""" + + model_config = ConfigDict(extra="allow") + + docstring_sections: List[str] = Field( + default_factory=lambda: ["description", "parameters", "returns", "examples"], + 
description="Sections to include in autogenerated docstrings.", + ) + docstring_max_length: Optional[PositiveInt] = Field( + default=None, description="Maximum length of autogenerated docstrings." + ) + http: Optional[dict] = Field( + default_factory=dict, + description="HTTP settings covers all requests made by the internal, utility, functions." + + " The configuration applies to both the requests and aiohttp libraries." + + "\n " + + """Available settings: + - cafile: str - Path to a CA certificate file. + - certfile: str - Path to a client certificate file. + - keyfile: str - Path to a client key file. + - password: str - Password for the client key file. # aiohttp only + - verify_ssl: bool - Verify SSL certificates. + - fingerprint: str - SSL fingerprint. # aiohttp only + - proxy: str - Proxy URL. + - proxy_auth: str | list - Proxy authentication. # aiohttp only + - proxy_headers: dict - Proxy headers. # aiohttp only + - timeout: int - Request timeout. + - auth: str | list - Basic authentication. + - headers: dict - Request headers. + - cookies: dict - Dictionary of session cookies. + + Any additional keys supplied will be ignored unless explicitly implemented via custom code. + + The settings are passed into the `requests.Session` object and the `aiohttp.ClientSession` object by: + - `openbb_core.provider.utils.helpers.make_request` - Sync + - `openbb_core.provider.utils.helpers.amake_request` - Async + - `openbb_core.provider.utils.helpers.amake_requests` - Async (multiple requests) + - Inserted to use with YFinance & Finviz library implementations. 
+ + Return a session object with the settings applied by: + - `openbb_core.provider.utils.helpers.get_requests_session` + - `openbb_core.provider.utils.helpers.get_async_requests_session` + """, + ) + uvicorn: Optional[dict] = Field( + default_factory=dict, + description="Uvicorn settings, covers all the launch of FastAPI when using the following entry points:" + + "\n " + + """ + - Running the FastAPI as a Python module script. + - python -m openbb_core.api.rest_api + - Running the `openbb-api` command. + - openbb-api + + All settings are passed directly to `uvicorn.run`, and can be found in the Uvicorn documentation. + - https://www.uvicorn.org/settings/ + + Keyword arguments supplied to the command line will take priority over the settings in this configuration. + """, + ) + + def __repr__(self) -> str: + """Return a string representation of the model.""" + return f"{self.__class__.__name__}\n\n" + "\n".join( + f"{k}: {v}" for k, v in self.model_dump().items() + ) diff --git a/openbb_platform/core/openbb_core/app/model/results/__init__.py b/openbb_platform/core/openbb_core/app/model/results/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..a87e3e8784fcc55c0825552f8956462572edf8c7 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/model/results/__init__.py @@ -0,0 +1 @@ +"""OpenBB Core App Model Results.""" diff --git a/openbb_platform/core/openbb_core/app/model/results/empty.py b/openbb_platform/core/openbb_core/app/model/results/empty.py new file mode 100644 index 0000000000000000000000000000000000000000..0f9779a681d038533cfe00b4597d5a056d126c31 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/model/results/empty.py @@ -0,0 +1,7 @@ +"""Empty results.""" + +from openbb_core.app.model.abstract.results import Results + + +class Empty(Results): + """Empty results.""" diff --git a/openbb_platform/core/openbb_core/app/model/system_settings.py b/openbb_platform/core/openbb_core/app/model/system_settings.py new file mode 
100644 index 0000000000000000000000000000000000000000..29d3201f84077e28528a749158537efcae437d64 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/model/system_settings.py @@ -0,0 +1,115 @@ +"""The OpenBB Platform System Settings.""" + +import json +import platform as pl # I do this so that the import doesn't conflict with the variable name +from pathlib import Path +from typing import List, Literal, Optional + +from openbb_core.app.constants import ( + HOME_DIRECTORY, + OPENBB_DIRECTORY, + SYSTEM_SETTINGS_PATH, + USER_SETTINGS_PATH, +) +from openbb_core.app.model.abstract.tagged import Tagged +from openbb_core.app.model.api_settings import APISettings +from openbb_core.app.model.python_settings import PythonSettings +from openbb_core.app.version import CORE_VERSION, VERSION +from pydantic import ConfigDict, Field, field_validator, model_validator + + +class SystemSettings(Tagged): + """System settings model.""" + + # System section + os: str = str(pl.system()) + python_version: str = str(pl.python_version()) + platform: str = str(pl.platform()) + + # OpenBB section + version: str = VERSION + core: str = CORE_VERSION + home_directory: str = str(HOME_DIRECTORY) + openbb_directory: str = str(OPENBB_DIRECTORY) + user_settings_path: str = str(USER_SETTINGS_PATH) + system_settings_path: str = str(SYSTEM_SETTINGS_PATH) + + # Logging section + logging_app_name: Literal["platform"] = "platform" + logging_commit_hash: Optional[str] = None + logging_frequency: Literal["D", "H", "M", "S"] = "H" + logging_handlers: List[str] = Field(default_factory=lambda: ["file"]) + logging_rolling_clock: bool = False + logging_verbosity: int = 20 + logging_sub_app: Literal["python", "api", "pro", "cli"] = "python" + logging_suppress: bool = False + log_collect: bool = True + + # API section + api_settings: APISettings = Field(default_factory=APISettings) + + # Python section + python_settings: PythonSettings = Field(default_factory=PythonSettings) + + # Others + debug_mode: bool = 
False + test_mode: bool = False + headless: bool = False + + model_config = ConfigDict(validate_assignment=True, frozen=True) + + def __repr__(self) -> str: + """Return a string representation of the model.""" + return f"{self.__class__.__name__}\n\n" + "\n".join( + f"{k}: {v}" for k, v in self.model_dump().items() + ) + + @staticmethod + def create_json(path: Path, template: Optional[dict] = None) -> None: + """Create an empty JSON file.""" + path.write_text(json.dumps(obj=template or {}, indent=4), encoding="utf-8") + + # TODO: Figure out why this works only opposite to what the docs say + # https://docs.pydantic.dev/latest/concepts/validators/#model-validators + # based on docs first argument should be self, but it works only with cls + @model_validator(mode="after") # type: ignore + @classmethod + def create_openbb_directory(cls, values: "SystemSettings") -> "SystemSettings": + """Create the OpenBB directory if it doesn't exist.""" + obb_dir = Path(values.openbb_directory).resolve() + user_settings = Path(values.user_settings_path).resolve() + system_settings = Path(values.system_settings_path).resolve() + obb_dir.mkdir(parents=True, exist_ok=True) + + if not user_settings.exists(): + cls.create_json( + user_settings, + {"credentials": {}, "preferences": {}, "defaults": {"commands": {}}}, + ) + + if not system_settings.exists(): + cls.create_json(system_settings, {}) + + return values + + @model_validator(mode="after") # type: ignore + @classmethod + def validate_posthog_handler(cls, values: "SystemSettings") -> "SystemSettings": + """If the user has enabled log collection, then we need to add the Posthog.""" + if ( + not any([values.test_mode, values.debug_mode, values.logging_suppress]) + and values.log_collect + and "posthog" not in values.logging_handlers + ): + values.logging_handlers.append("posthog") + + return values + + @field_validator("logging_handlers") + @classmethod + def validate_logging_handlers(cls, v): + """Validate the logging handlers.""" + 
for value in v: + if value not in ["stdout", "stderr", "noop", "file", "posthog"]: + raise ValueError("Invalid logging handler") + return v diff --git a/openbb_platform/core/openbb_core/app/model/user_settings.py b/openbb_platform/core/openbb_core/app/model/user_settings.py new file mode 100644 index 0000000000000000000000000000000000000000..d769096d72a533e95b251048ab272ad46edb993b --- /dev/null +++ b/openbb_platform/core/openbb_core/app/model/user_settings.py @@ -0,0 +1,49 @@ +"""User settings model.""" + +import json +import os +import warnings + +from openbb_core.app.constants import USER_SETTINGS_PATH +from openbb_core.app.model.abstract.tagged import Tagged +from openbb_core.app.model.credentials import Credentials +from openbb_core.app.model.defaults import Defaults +from openbb_core.app.model.preferences import Preferences +from openbb_core.app.model.profile import Profile +from pydantic import Field + + +class UserSettings(Tagged): + """User settings.""" + + profile: Profile = Field(default_factory=Profile) + credentials: Credentials = Field(default_factory=Credentials) + preferences: Preferences = Field(default_factory=Preferences) + defaults: Defaults = Field(default_factory=Defaults) + + def __init__(self, **kwargs): + """Initialize user settings by loading directly from file if it exists.""" + # Check if user settings file exists and load from it + if os.path.exists(USER_SETTINGS_PATH): + try: + with open(USER_SETTINGS_PATH) as f: + file_settings = json.load(f) + # Initialize with settings from file + super().__init__(**{k: v for k, v in file_settings.items() if v}) + except (json.JSONDecodeError, OSError) as e: + warnings.warn( + f"Error loading user settings from file: {e}", + stacklevel=2, + category=UserWarning, + ) + # Fall back to defaults if file can't be read + super().__init__(**kwargs) + else: + # Use defaults if file doesn't exist + super().__init__(**kwargs) + + def __repr__(self) -> str: + """Human readable representation of the object.""" 
+ return f"{self.__class__.__name__}\n\n" + "\n".join( + f"{k}: {v}" for k, v in self.model_dump().items() + ) diff --git a/openbb_platform/core/openbb_core/app/provider_interface.py b/openbb_platform/core/openbb_core/app/provider_interface.py new file mode 100644 index 0000000000000000000000000000000000000000..fc2ae1cd7a4fa2ed297546c431c34d2c8e29a001 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/provider_interface.py @@ -0,0 +1,661 @@ +"""Provider Interface.""" + +from dataclasses import dataclass, make_dataclass +from difflib import SequenceMatcher +from typing import ( + Annotated, + Any, + Callable, + Dict, + List, + Literal, + Optional, + Tuple, + Type, + Union, +) + +from fastapi import Body, Query +from openbb_core.app.model.abstract.singleton import SingletonMeta +from openbb_core.app.model.obbject import OBBject +from openbb_core.provider.query_executor import QueryExecutor +from openbb_core.provider.registry_map import MapType, RegistryMap +from openbb_core.provider.utils.helpers import to_snake_case +from pydantic import ( + BaseModel, + ConfigDict, + Discriminator, + Field, + SerializeAsAny, + Tag, + create_model, +) +from pydantic.fields import FieldInfo + +TupleFieldType = Tuple[str, Optional[Type], Optional[Any]] + + +@dataclass +class DataclassField: + """Dataclass field.""" + + name: str + annotation: Optional[Type] + default: Optional[Any] + + +@dataclass +class StandardParams: + """Standard params dataclass.""" + + +@dataclass +class ExtraParams: + """Extra params dataclass.""" + + +class StandardData(BaseModel): + """Standard data model.""" + + +class ExtraData(BaseModel): + """Extra data model.""" + + +@dataclass +class ProviderChoices: + """Provider choices dataclass.""" + + provider: Literal # type: ignore + + +class ProviderInterface(metaclass=SingletonMeta): + """Provider interface class. + + Properties + ---------- + map : MapType + Dictionary of provider information. + credentials: List[str] + List of credentials. 
+ model_providers : Dict[str, ProviderChoices] + Dictionary of provider choices by model. + params : Dict[str, Dict[str, Union[StandardParams, ExtraParams]]] + Dictionary of params by model. + return_schema : Dict[str, Type[BaseModel]] + Dictionary of return data schema by model. + available_providers : List[str] + List of available providers. + provider_choices : ProviderChoices + Dataclass with literal of provider names. + models : List[str] + List of model names. + + Methods + ------- + create_executor : QueryExecutor + Create a query executor + """ + + def __init__( + self, + registry_map: Optional[RegistryMap] = None, + query_executor: Optional[QueryExecutor] = None, + ) -> None: + """Initialize provider interface.""" + self._registry_map = registry_map or RegistryMap() + self._query_executor = query_executor or QueryExecutor + + self._map = self._registry_map.standard_extra + # TODO: Try these 4 methods in a single iteration + self._model_providers_map = self._generate_model_providers_dc(self._map) + self._params = self._generate_params_dc(self._map) + self._data = self._generate_data_dc(self._map) + self._return_schema = self._generate_return_schema(self._data) + self._return_annotations = self._generate_return_annotations( + self._registry_map.original_models + ) + + self._available_providers = self._registry_map.available_providers + self._provider_choices = self._get_provider_choices(self._available_providers) + + @property + def map(self) -> MapType: + """Dictionary of provider information.""" + return self._map + + @property + def credentials(self) -> Dict[str, List[str]]: + """Map providers to credentials.""" + return self._registry_map.credentials + + @property + def model_providers(self) -> Dict[str, ProviderChoices]: + """Dictionary of provider choices by model.""" + return self._model_providers_map + + @property + def params(self) -> Dict[str, Dict[str, Union[StandardParams, ExtraParams]]]: + """Dictionary of params by model.""" + return 
self._params + + @property + def data(self) -> Dict[str, Dict[str, Union[StandardData, ExtraData]]]: + """Dictionary of data by model.""" + return self._data + + @property + def return_schema(self) -> Dict[str, Type[BaseModel]]: + """Dictionary of data by model merged.""" + return self._return_schema + + @property + def available_providers(self) -> List[str]: + """List of available providers.""" + return self._available_providers + + @property + def provider_choices(self) -> type: + """Dataclass with literal of provider names.""" + return self._provider_choices + + @property + def models(self) -> List[str]: + """List of model names.""" + return self._registry_map.models + + @property + def return_annotations(self) -> Dict[str, Type[OBBject]]: + """Return map.""" + return self._return_annotations + + def create_executor(self) -> QueryExecutor: + """Get query executor.""" + return self._query_executor(self._registry_map.registry) # type: ignore[operator] + + @staticmethod + def _merge_fields( + current: DataclassField, incoming: DataclassField, query: bool = False + ) -> DataclassField: + """Merge 2 dataclass fields.""" + curr_name = current.name + curr_type: Optional[Type] = current.annotation + curr_desc = getattr(current.default, "description", "") + curr_json_schema_extra = getattr(current.default, "json_schema_extra", {}) + + inc_type: Optional[Type] = incoming.annotation + inc_desc = getattr(incoming.default, "description", "") + inc_json_schema_extra = getattr(incoming.default, "json_schema_extra", {}) + + def split_desc(desc: str) -> str: + """Split field description.""" + item = desc.split(" (provider: ") + detail = item[0] if item else "" + return detail + + def merge_json_schema_extra(curr: dict, inc: dict) -> dict: + """Merge json schema extra.""" + for key in curr.keys() & inc.keys(): + # Merge keys that are in both dictionaries if both are lists + curr_value = curr[key] + inc_value = inc[key] + if isinstance(curr_value, list) and isinstance(inc_value, 
list): + curr[key] = list(set(curr.get(key, []) + inc.get(key, []))) + inc.pop(key) + + # Add any remaining keys from inc to curr + curr.update(inc) + return curr + + json_schema_extra: dict = merge_json_schema_extra( + curr=curr_json_schema_extra or {}, inc=inc_json_schema_extra or {} + ) + + curr_detail = split_desc(curr_desc) + inc_detail = split_desc(inc_desc) + + curr_title = getattr(current.default, "title", "") + inc_title = getattr(incoming.default, "title", "") + providers = ",".join([curr_title, inc_title]) + formatted_prov = providers.replace(",", ", ") + + if SequenceMatcher(None, curr_detail, inc_detail).ratio() > 0.8: + new_desc = f"{curr_detail} (provider: {formatted_prov})" + else: + new_desc = f"{curr_desc};\n {inc_desc}" + + QF: Callable = Query if query else FieldInfo # type: ignore[assignment] + merged_default = QF( + default=getattr(current.default, "default", None), + title=providers, + description=new_desc, + json_schema_extra=json_schema_extra, + ) + + merged_type: Optional[Type] = ( + Union[curr_type, inc_type] # type: ignore[assignment] + if curr_type != inc_type + else curr_type + ) + + return DataclassField(curr_name, merged_type, merged_default) + + @staticmethod + def _create_field( + name: str, + field: FieldInfo, + provider_name: Optional[str] = None, + query: bool = False, + force_optional: bool = False, + ) -> DataclassField: + new_name = name.replace(".", "_") + annotation = field.annotation + + additional_description = "" + choices: Dict = {} + if extra := field.json_schema_extra: + providers: List = [] + for p, v in extra.items(): # type: ignore + if isinstance(v, dict) and v.get("multiple_items_allowed"): + providers.append(p) + choices[p] = {"multiple_items_allowed": True, "choices": v.get("choices")} # type: ignore + elif isinstance(v, list) and "multiple_items_allowed" in v: + # For backwards compatibility, before this was a list + providers.append(p) + choices[p] = {"multiple_items_allowed": True, "choices": None} # type: 
ignore + elif isinstance(v, dict) and v.get("choices"): + choices[p] = { + "multiple_items_allowed": False, + "choices": v.get("choices"), + } + + if isinstance(v, dict) and v.get("x-widget_config"): + if p not in choices: + choices[p] = {"x-widget_config": v.get("x-widget_config")} + else: + choices[p]["x-widget_config"] = v.get("x-widget_config") + + if providers: + if provider_name: + additional_description += " Multiple comma separated items allowed." + else: + additional_description += ( + " Multiple comma separated items allowed for provider(s): " + + ", ".join(providers) # type: ignore[arg-type] + + "." + ) + provider_field = ( + f"(provider: {provider_name})" if provider_name != "openbb" else "" + ) + description = ( + f"{field.description}{additional_description} {provider_field}" + if provider_name and field.description + else f"{field.description}{additional_description}" + ) + + if field.is_required(): + if force_optional: + annotation = Optional[annotation] # type: ignore + default = None + else: + default = ... 
+ else: + default = field.default + + if ( + hasattr(annotation, "__name__") + and annotation.__name__ in ["Dict", "dict", "Data"] # type: ignore + or field.kw_only is True + ): + return DataclassField( + new_name, + annotation, + Body( + default=default, + title=provider_name, + description=description, + alias=field.alias or None, + json_schema_extra=choices, + ), + ) + + if query: + # We need to use query if we want the field description to show + # up in the swagger, it's a fastapi limitation + return DataclassField( + new_name, + annotation, + Query( + default=default, + title=provider_name, + description=description, + alias=field.alias or None, + json_schema_extra=choices, + ), + ) + if provider_name: + return DataclassField( + new_name, + annotation, + Field( + default=default or None, + title=provider_name, + description=description, + json_schema_extra=choices, + ), + ) + + return DataclassField(new_name, annotation, default) + + @classmethod + def _extract_params( + cls, + providers: Any, + ) -> Tuple[Dict[str, TupleFieldType], Dict[str, TupleFieldType]]: + """Extract parameters from map.""" + standard: Dict[str, TupleFieldType] = {} + extra: Dict[str, TupleFieldType] = {} + + for provider_name, model_details in providers.items(): + if provider_name == "openbb": + for name, field in model_details["QueryParams"]["fields"].items(): + incoming = cls._create_field(name, field, query=True) + + standard[incoming.name] = ( + incoming.name, + incoming.annotation, + incoming.default, + ) + else: + for name, field in model_details["QueryParams"]["fields"].items(): + if name not in providers["openbb"]["QueryParams"]["fields"]: + s_name = to_snake_case(name) + incoming = cls._create_field( + s_name, + field, + provider_name, + query=True, + force_optional=True, + ) + + if incoming.name in extra: + current = DataclassField(*extra[incoming.name]) + updated = cls._merge_fields(current, incoming, query=True) + else: + updated = incoming + + extra[updated.name] = ( + 
updated.name, + updated.annotation, + updated.default, + ) + + return standard, extra + + @classmethod + def _extract_data( + cls, + providers: Any, + ) -> Tuple[Dict[str, TupleFieldType], Dict[str, TupleFieldType]]: + standard: Dict[str, TupleFieldType] = {} + extra: Dict[str, TupleFieldType] = {} + + for provider_name, model_details in providers.items(): + if provider_name == "openbb": + for name, field in model_details["Data"]["fields"].items(): + if ( + name == "provider" + and field.description == "The data provider for the data." + ): # noqa + continue + incoming = cls._create_field(name, field, "openbb") + + standard[incoming.name] = ( + incoming.name, + incoming.annotation, + incoming.default, + ) + else: + for name, field in model_details["Data"]["fields"].items(): + if name not in providers["openbb"]["Data"]["fields"]: + if ( + name == "provider" + and field.description == "The data provider for the data." + ): # noqa + continue + incoming = cls._create_field( + to_snake_case(name), + field, + provider_name, + force_optional=True, + ) + + if incoming.name in extra: + current = DataclassField(*extra[incoming.name]) + updated = cls._merge_fields(current, incoming) + else: + updated = incoming + + extra[updated.name] = ( + updated.name, + updated.annotation, + updated.default, + ) + + return standard, extra + + def _generate_params_dc( + self, map_: MapType + ) -> Dict[str, Dict[str, Union[StandardParams, ExtraParams]]]: + """Generate dataclasses for params. + + This creates a dictionary of dataclasses that can be injected as a FastAPI + dependency. + + Example + ------- + @dataclass + class CompanyNews(StandardParams): + symbols: str = Query(...) + page: int = Query(default=1) + + @dataclass + class CompanyNews(ExtraParams): + pageSize: int = Query(default=15, title="benzinga") + displayOutput: int = Query(default="headline", title="benzinga") + ... 
+ sort: str = Query(default=None, title="benzinga,polygon") + """ + result: Dict = {} + + for model_name, providers in map_.items(): + standard: dict + extra: dict + standard, extra = self._extract_params(providers) + + result[model_name] = { + "standard": make_dataclass( + cls_name=model_name, + fields=list(standard.values()), # type: ignore[arg-type] + bases=(StandardParams,), + ), + "extra": make_dataclass( + cls_name=model_name, + fields=list(extra.values()), # type: ignore[arg-type] + bases=(ExtraParams,), + ), + } + return result + + def _generate_model_providers_dc(self, map_: MapType) -> Dict[str, ProviderChoices]: + """Generate dataclasses for provider choices by model. + + This creates a dictionary that maps model names to dataclasses that can be + injected as a FastAPI dependency. + + Example + ------- + @dataclass + class CompanyNews(ProviderChoices): + provider: Literal["benzinga", "polygon"] + """ + result: Dict = {} + + for model_name, providers in map_.items(): + choices = sorted(list(providers.keys())) + if "openbb" in choices: + choices.remove("openbb") + + result[model_name] = make_dataclass( # type: ignore + cls_name=model_name, + fields=[ + ( + "provider", + Literal[tuple(choices)], # type: ignore + ... if len(choices) > 1 else choices[0], + ) + ], + bases=(ProviderChoices,), + ) + + return result + + def _generate_data_dc( + self, map_: MapType + ) -> Dict[str, Dict[str, Union[StandardData, ExtraData]]]: + """Generate dataclasses for data. + + This creates a dictionary of dataclasses. 
+ + Example + ------- + class EquityHistoricalData(StandardData): + date: date + open: PositiveFloat + high: PositiveFloat + low: PositiveFloat + close: PositiveFloat + adj_close: Optional[PositiveFloat] + volume: PositiveFloat + """ + result: Dict = {} + + for model_name, providers in map_.items(): + standard: dict + extra: dict + standard, extra = self._extract_data(providers) + result[model_name] = { + "standard": make_dataclass( + cls_name=model_name, + fields=list(standard.values()), # type: ignore[arg-type] + bases=(StandardData,), + ), + "extra": make_dataclass( + cls_name=model_name, + fields=list(extra.values()), # type: ignore[arg-type] + bases=(ExtraData,), + ), + } + + return result + + def _generate_return_schema( + self, + data: Dict[str, Dict[str, Union[StandardData, ExtraData]]], + ) -> Dict[str, Type[BaseModel]]: + """Merge standard data with extra data into a single BaseModel to be injected as FastAPI dependency.""" + result: Dict = {} + for model_name, dataclasses in data.items(): + standard = dataclasses["standard"] + extra = dataclasses["extra"] + + fields = standard.model_fields.copy() + fields.update(extra.model_fields) + + fields_dict: Dict[str, Tuple[Any, Any]] = {} + + for name, field in fields.items(): + fields_dict[name] = ( + field.annotation, + Field( + default=field.default, + title=field.title, + description=field.description, + alias=field.alias, + json_schema_extra=field.json_schema_extra, + ), + ) + + model_config = ConfigDict(extra="allow", populate_by_name=True) + + result[model_name] = create_model( # type: ignore + model_name, + __config__=model_config, + **fields_dict, # type: ignore + ) + + return result + + def _get_provider_choices(self, available_providers: List[str]) -> type: + return make_dataclass( + cls_name="ProviderChoices", + fields=[("provider", Literal[tuple(available_providers)])], # type: ignore + bases=(ProviderChoices,), + ) + + def _generate_return_annotations( + self, original_models: Dict[str, Dict[str, 
Any]] + ) -> Dict[str, Type[OBBject]]: + """Generate return annotations for FastAPI. + + Example + ------- + class Data(BaseModel): + ... + + class EquityData(Data): + price: float + + class YFEquityData(EquityData): + yf_field: str + + class AVEquityData(EquityData): + av_field: str + + class OBBject(BaseModel): + results: List[ + SerializeAsAny[ + Annotated[ + Union[ + Annotated[YFEquityData, Tag("yf")], + Annotated[AVEquityData, Tag("av")], + ], + Discriminator(get_provider), + ] + ] + ] + """ + + def get_provider(v: Type[BaseModel]): + """Callable to discriminate which BaseModel to use.""" + return getattr(v, "_provider", None) + + annotations = {} + for name, models in original_models.items(): + outer = set() + args = set() + for provider, model in models.items(): + data = model["data"] + outer.add(model["results_type"]) + args.add(Annotated[data, Tag(provider)]) + # We set the provider to use it in discriminator function + setattr(data, "_provider", provider) + meta = Discriminator(get_provider) if len(args) > 1 else None + inner = SerializeAsAny[Annotated[Union[tuple(args)], meta]] # type: ignore[misc,valid-type] + full = Union[tuple((o[inner] if o else inner) for o in outer)] # type: ignore[valid-type,misc] + annotations[name] = create_model( + f"OBBject_{name}", + __base__=OBBject[full], # type: ignore[valid-type] + __doc__=f"OBBject with results of type {name}", + ) + return annotations diff --git a/openbb_platform/core/openbb_core/app/query.py b/openbb_platform/core/openbb_core/app/query.py new file mode 100644 index 0000000000000000000000000000000000000000..84d7a9aa03e59a21bfb0e184e348233fa3df05d9 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/query.py @@ -0,0 +1,82 @@ +"""Query class.""" + +import warnings +from dataclasses import asdict +from typing import Any, Dict + +from openbb_core.app.model.abstract.warning import OpenBBWarning +from openbb_core.app.model.command_context import CommandContext +from openbb_core.app.provider_interface 
"""Query class."""

import warnings
from dataclasses import asdict
from typing import Any, Dict

from openbb_core.app.model.abstract.warning import OpenBBWarning
from openbb_core.app.model.command_context import CommandContext
from openbb_core.app.provider_interface import (
    ExtraParams,
    ProviderChoices,
    ProviderInterface,
    StandardParams,
)


class Query:
    """Query class.

    Bundles the command context, the chosen provider and the standard/extra
    parameters needed to execute a single provider request.
    """

    def __init__(
        self,
        cc: CommandContext,
        provider_choices: ProviderChoices,
        standard_params: StandardParams,
        extra_params: ExtraParams,
    ) -> None:
        """Initialize Query class."""
        self.cc = cc
        original = asdict(provider_choices)
        self.provider = original.get("provider")
        self.standard_params = standard_params
        self.extra_params = extra_params
        # The standard-params dataclass is named after the model it queries.
        self.name = self.standard_params.__class__.__name__
        self.provider_interface = ProviderInterface()

    def filter_extra_params(
        self,
        extra_params: ExtraParams,
        provider_name: str,
    ) -> Dict[str, Any]:
        """Filter extra params based on the provider and warn if not supported."""
        original = asdict(extra_params)
        filtered = {}

        query = extra_params.__class__.__name__
        fields = asdict(self.provider_interface.params[query]["extra"]())  # type: ignore

        for k, v in original.items():
            f = fields[k]
            # The field title carries the comma-separated list of providers
            # that support this parameter — presumably set by the provider
            # interface builder; verify against ProviderInterface.
            providers = f.title.split(",") if hasattr(f, "title") else []

            # We only filter/warn if the value is not the default, because fastapi
            # Depends always sends the default value, even if it's not in the request.
            if v != f.default:
                if provider_name in providers:
                    filtered[k] = v
                else:
                    available = ", ".join(providers)
                    warnings.warn(
                        message=f"Parameter '{k}' is not supported by {provider_name}. Available for: {available}.",
                        category=OpenBBWarning,
                    )

        return filtered

    async def execute(self) -> Any:
        """Execute the query.

        Merges standard params with the provider-supported extra params and
        dispatches the request through the provider interface executor.
        """
        standard_dict = asdict(self.standard_params)
        extra_dict = (
            self.filter_extra_params(self.extra_params, self.provider)  # type: ignore
            if self.extra_params
            else {}
        )
        query_executor = self.provider_interface.create_executor()

        return await query_executor.execute(
            provider_name=self.provider,
            model_name=self.name,
            params={**standard_dict, **extra_dict},
            credentials=self.cc.user_settings.credentials.model_dump(),
            preferences=self.cc.user_settings.preferences.model_dump(),
        )
class Router:
    """OpenBB Router Class.

    Thin wrapper around FastAPI's APIRouter that registers OpenBB commands
    and keeps track of nested sub-routers.
    """

    @property
    def api_router(self) -> APIRouter:
        """API Router."""
        return self._api_router

    @property
    def prefix(self) -> str:
        """Prefix."""
        return self._api_router.prefix

    @property
    def description(self) -> Optional[str]:
        """Description."""
        return self._description

    @property
    def routers(self) -> Dict[str, "Router"]:
        """Routers nested within the Router, i.e. sub-routers."""
        return self._routers

    def __init__(
        self,
        prefix: str = "",
        description: Optional[str] = None,
    ) -> None:
        """Initialize Router."""
        self._api_router = APIRouter(
            prefix=prefix,
            responses={404: {"description": "Not found"}},
        )
        self._description = description
        self._routers: Dict[str, Router] = {}

    @overload
    def command(self, func: Optional[Callable[P, OBBject]]) -> Callable[P, OBBject]:
        pass

    @overload
    def command(self, **kwargs) -> Callable:
        pass

    def command(
        self,
        func: Optional[Callable[P, OBBject]] = None,
        **kwargs,
    ) -> Optional[Callable]:
        """Command decorator for routes."""
        # Support both bare @router.command and @router.command(...) usage.
        if func is None:
            return lambda f: self.command(f, **kwargs)

        api_router = self._api_router

        model = kwargs.pop("model", "")
        no_validate = kwargs.pop("no_validate", None)
        if no_validate is True:
            func.__annotations__["return"] = None
        # complete() returns None for an unknown model; the route is then
        # skipped entirely.
        if func := SignatureInspector.complete(func, model):

            kwargs["response_model_exclude_unset"] = True
            kwargs["openapi_extra"] = kwargs.get("openapi_extra", {})
            kwargs["openapi_extra"]["model"] = model
            kwargs["openapi_extra"]["examples"] = filter_list(
                examples=kwargs.pop("examples", []),
                providers=ProviderInterface().available_providers,
            )
            kwargs["openapi_extra"]["no_validate"] = no_validate
            kwargs["operation_id"] = kwargs.get(
                "operation_id", SignatureInspector.get_operation_id(func)
            )
            kwargs["path"] = kwargs.get("path", f"/{func.__name__}")
            kwargs["endpoint"] = func
            kwargs["methods"] = kwargs.get("methods", ["GET"])
            kwargs["response_model"] = (
                kwargs.get(
                    "response_model",
                    func.__annotations__["return"],  # type: ignore
                )
                if not no_validate
                else func.__annotations__["return"]
            )
            kwargs["response_model_by_alias"] = kwargs.get(
                "response_model_by_alias", False
            )
            kwargs["description"] = SignatureInspector.get_description(func)
            kwargs["responses"] = kwargs.get(
                "responses",
                {
                    204: {
                        "description": "Empty response",
                    },
                    400: {
                        "model": OpenBBErrorResponse,
                        "description": "No Results Found",
                    },
                    404: {"description": "Not found"},
                    500: {
                        "model": OpenBBErrorResponse,
                        "description": "Internal Error",
                    },
                    502: {
                        "model": OpenBBErrorResponse,
                        "description": "Unauthorized",
                    },
                },
            )

            # For custom deprecation
            if kwargs.get("deprecated", False):
                deprecation: OpenBBDeprecationWarning = kwargs.pop("deprecation")

                kwargs["summary"] = DeprecationSummary(
                    deprecation.long_message, deprecation
                )

            api_router.add_api_route(**kwargs)

        return func

    def include_router(
        self,
        router: "Router",
        prefix: str = "",
    ):
        """Include router."""
        tags = [prefix.strip("/")] if prefix else None
        self._api_router.include_router(
            router=router.api_router, prefix=prefix, tags=tags  # type: ignore
        )
        name = prefix if prefix else router.prefix
        self._routers[name.strip("/")] = router

    def get_attr(self, path: str, attr: str) -> Any:
        """Get router attribute from path.

        Parameters
        ----------
        path : str
            Path to the router or nested router.
            E.g. "/equity" or "/equity/price".
        attr : str
            Attribute to get.

        Returns
        -------
        Any
            Attribute value.
        """
        return self._search_attr(self, path, attr)

    @staticmethod
    def _search_attr(router: "Router", path: str, attr: str) -> Any:
        """Recursively search router attribute from path."""
        path = path.strip("/")
        first = path.split("/")[0]
        if first in router.routers:
            # Descend into the sub-router with the remainder of the path.
            return Router._search_attr(
                router.routers[first], "/".join(path.split("/")[1:]), attr
            )
        return getattr(router, attr, None)
+ """ + return self._search_attr(self, path, attr) + + @staticmethod + def _search_attr(router: "Router", path: str, attr: str) -> Any: + """Recursively search router attribute from path.""" + path = path.strip("/") + first = path.split("/")[0] + if first in router.routers: + return Router._search_attr( + router.routers[first], "/".join(path.split("/")[1:]), attr + ) + return getattr(router, attr, None) + + +class SignatureInspector: + """Inspect function signature.""" + + @classmethod + def complete( + cls, func: Callable[P, OBBject], model: str + ) -> Optional[Callable[P, OBBject]]: + """Complete function signature.""" + if isclass(return_type := func.__annotations__["return"]) and not issubclass( + return_type, OBBject + ): + return func + + provider_interface = ProviderInterface() + + if model: + if model not in provider_interface.models: + if Env().DEBUG_MODE: + warnings.warn( + message=f"\nSkipping api route '/{func.__name__}'.\n" + f"Model '{model}' not found.\n\n" + "Check available models in ProviderInterface().models", + category=OpenBBWarning, + ) + return None + cls.validate_signature( + func, + { + "provider_choices": ProviderChoices, + "standard_params": StandardParams, + "extra_params": ExtraParams, + }, + ) + + func = cls.inject_dependency( + func=func, + arg="provider_choices", + callable_=provider_interface.model_providers[model], + ) + + func = cls.inject_dependency( + func=func, + arg="standard_params", + callable_=provider_interface.params[model]["standard"], + ) + + func = cls.inject_dependency( + func=func, + arg="extra_params", + callable_=provider_interface.params[model]["extra"], + ) + + func = cls.inject_return_annotation( + func=func, + annotation=provider_interface.return_annotations[model], + ) + + else: + func = cls.polish_return_schema(func) + if ( + "provider_choices" in func.__annotations__ + and func.__annotations__["provider_choices"] == ProviderChoices + ): + func = cls.inject_dependency( + func=func, + arg="provider_choices", + 
callable_=provider_interface.provider_choices, + ) + + return func + + @staticmethod + def polish_return_schema(func: Callable[P, OBBject]) -> Callable[P, OBBject]: + """Polish API schemas by filling `__doc__` and `__name__`.""" + return_type = func.__annotations__["return"] + is_list = False + + if return_type == OBBject: + results_type = get_type_hints(return_type)["results"] + results_type_args = get_args(results_type) + if not isinstance(results_type, type(None)): + results_type = results_type_args[0] + + is_list = isinstance(get_origin(results_type), list) + inner_type = ( + results_type_args[0] if is_list and results_type_args else results_type + ) + inner_type_name = getattr(inner_type, "__name__", inner_type) + + func.__annotations__["return"].__doc__ = "OBBject" + func.__annotations__["return"].__name__ = f"OBBject[{inner_type_name}]" + + return func + + @staticmethod + def validate_signature( + func: Callable[P, OBBject], expected: Dict[str, type] + ) -> None: + """Validate function signature before binding to model.""" + for k, v in expected.items(): + if k not in func.__annotations__: + raise AttributeError( + f"Invalid signature: '{func.__name__}'. Missing '{k}' parameter." + ) + + if func.__annotations__[k] != v: + raise TypeError( + f"Invalid signature: '{func.__name__}'. '{k}' parameter must be of type '{v.__name__}'." 
+ ) + + @staticmethod + def inject_dependency( + func: Callable[P, OBBject], arg: str, callable_: Any + ) -> Callable[P, OBBject]: + """Annotate function with dependency injection.""" + func.__annotations__[arg] = Annotated[callable_, Depends()] # type: ignore + return func + + @staticmethod + def inject_return_annotation( + func: Callable[P, OBBject], annotation: Type[OBBject] + ) -> Callable[P, OBBject]: + """Annotate function with return annotation.""" + func.__annotations__["return"] = annotation + return func + + @staticmethod + def get_description(func: Callable) -> str: + """Get description from docstring.""" + doc = func.__doc__ + if doc: + description = doc.split(" Parameters\n ----------")[0] + description = description.split(" Returns\n -------")[0] + description = description.split(" Examples\n -------")[0] + description = "\n".join([line.strip() for line in description.split("\n")]) + + return description + return "" + + @staticmethod + def get_operation_id(func: Callable, sep: str = "_") -> str: + """Get operation id.""" + operation_id = [ + t.replace("_router", "").replace("openbb_", "") + for t in func.__module__.split(".") + [func.__name__] + ] + cleaned_id = sep.join({c: "" for c in operation_id if c}.keys()) + return cleaned_id + + +class CommandMap: + """Matching Routes with Commands.""" + + def __init__( + self, router: Optional[Router] = None, coverage_sep: Optional[str] = None + ) -> None: + """Initialize CommandMap.""" + self._router = router or RouterLoader.from_extensions() + self._map = self.get_command_map(router=self._router) + self._provider_coverage: Dict[str, List[str]] = {} + self._command_coverage: Dict[str, List[str]] = {} + self._commands_model: Dict[str, str] = {} + self._coverage_sep = coverage_sep + + @property + def map(self) -> Dict[str, Callable]: + """Get command map.""" + return self._map + + @property + def provider_coverage(self) -> Dict[str, List[str]]: + """Get provider coverage.""" + if not self._provider_coverage: + 
class CommandMap:
    """Matching Routes with Commands."""

    def __init__(
        self, router: Optional[Router] = None, coverage_sep: Optional[str] = None
    ) -> None:
        """Initialize CommandMap."""
        self._router = router or RouterLoader.from_extensions()
        self._map = self.get_command_map(router=self._router)
        # Coverage maps are computed lazily on first property access.
        self._provider_coverage: Dict[str, List[str]] = {}
        self._command_coverage: Dict[str, List[str]] = {}
        self._commands_model: Dict[str, str] = {}
        self._coverage_sep = coverage_sep

    @property
    def map(self) -> Dict[str, Callable]:
        """Get command map."""
        return self._map

    @property
    def provider_coverage(self) -> Dict[str, List[str]]:
        """Get provider coverage."""
        if not self._provider_coverage:
            self._provider_coverage = self.get_provider_coverage(
                router=self._router, sep=self._coverage_sep
            )
        return self._provider_coverage

    @property
    def command_coverage(self) -> Dict[str, List[str]]:
        """Get command coverage."""
        if not self._command_coverage:
            self._command_coverage = self.get_command_coverage(
                router=self._router, sep=self._coverage_sep
            )
        return self._command_coverage

    @property
    def commands_model(self) -> Dict[str, str]:
        """Get commands model."""
        if not self._commands_model:
            self._commands_model = self.get_commands_model(
                router=self._router, sep=self._coverage_sep
            )
        return self._commands_model

    @staticmethod
    def get_command_map(
        router: Router,
    ) -> Dict[str, Callable]:
        """Get command map: route path -> endpoint callable."""
        api_router = router.api_router
        command_map = {route.path: route.endpoint for route in api_router.routes}  # type: ignore
        return command_map

    @staticmethod
    def get_provider_coverage(
        router: Router, sep: Optional[str] = None
    ) -> Dict[str, List[str]]:
        """Get provider coverage: provider -> list of route paths it covers."""
        api_router = router.api_router

        mapping = ProviderInterface().map

        coverage_map: Dict[Any, Any] = {}
        for route in api_router.routes:
            openapi_extra = getattr(route, "openapi_extra", None)
            if openapi_extra:
                model = openapi_extra.get("model", None)
                if model:
                    providers = list(mapping[model].keys())
                    # "openbb" is the standard namespace, not a real provider.
                    if "openbb" in providers:
                        providers.remove("openbb")
                    for provider in providers:
                        if provider not in coverage_map:
                            coverage_map[provider] = []
                        if hasattr(route, "path"):
                            rp = (
                                route.path  # type: ignore
                                if sep is None
                                else route.path.replace("/", sep)  # type: ignore
                            )
                            coverage_map[provider].append(rp)

        return coverage_map

    @staticmethod
    def get_command_coverage(
        router: Router, sep: Optional[str] = None
    ) -> Dict[str, List[str]]:
        """Get command coverage: route path -> list of providers covering it."""
        api_router = router.api_router

        mapping = ProviderInterface().map

        coverage_map: Dict[Any, Any] = {}
        for route in api_router.routes:
            # FIX: use a default so routes without `openapi_extra` don't raise
            # AttributeError (consistent with get_provider_coverage).
            openapi_extra = getattr(route, "openapi_extra", None)
            if openapi_extra:
                model = openapi_extra.get("model", None)
                if model:
                    providers = list(mapping[model].keys())
                    if "openbb" in providers:
                        providers.remove("openbb")

                    if hasattr(route, "path"):
                        rp = (
                            route.path if sep is None else route.path.replace("/", sep)  # type: ignore
                        )
                        # FIX: the original membership test used `route.path`
                        # while the map is keyed by `rp`, and the seeded [] was
                        # overwritten immediately; assign directly.
                        coverage_map[rp] = providers
        return coverage_map

    @staticmethod
    def get_commands_model(router: Router, sep: Optional[str] = None) -> Dict[str, str]:
        """Get commands model: route path -> model name."""
        api_router = router.api_router

        coverage_map: Dict[Any, Any] = {}
        for route in api_router.routes:
            # FIX: default to None instead of raising on routes that lack
            # `openapi_extra`; also drop the dead wrong-key initialization.
            openapi_extra = getattr(route, "openapi_extra", None)
            if openapi_extra:
                model = openapi_extra.get("model", None)
                if model and hasattr(route, "path"):
                    rp = (
                        route.path if sep is None else route.path.replace("/", sep)  # type: ignore
                    )
                    coverage_map[rp] = model
        return coverage_map

    def get_command(self, route: str) -> Optional[Callable]:
        """Get command from route."""
        return self._map.get(route, None)


class LoadingError(Exception):
    """Error loading extension."""


class RouterLoader:
    """Router Loader."""

    @staticmethod
    @lru_cache
    def from_extensions() -> Router:
        """Load routes from extensions into a single root router."""
        router = Router()

        for name, entry in ExtensionLoader().core_objects.items():  # type: ignore[attr-defined]
            try:
                router.include_router(router=entry, prefix=f"/{name}")
            except Exception as e:
                msg = f"Error loading extension: {name}\n"
                if Env().DEBUG_MODE:
                    traceback.print_exception(type(e), e, e.__traceback__)
                    raise LoadingError(msg + f"\033[91m{e}\033[0m") from e
                warnings.warn(
                    message=msg,
                    category=OpenBBWarning,
                )

        return router
"""Auth service."""

import logging
from importlib import import_module
from types import ModuleType
from typing import Awaitable, Callable, Optional

from fastapi import APIRouter
from openbb_core.api.router.user import (
    auth_hook as default_auth_hook,
    router as default_router,
    user_settings_hook as default_user_settings_hook,
)
from openbb_core.app.extension_loader import ExtensionLoader
from openbb_core.app.model.abstract.singleton import SingletonMeta
from openbb_core.app.model.user_settings import UserSettings
from openbb_core.env import Env

# Name of the auth extension to load, configured via the environment.
EXT_NAME = Env().API_AUTH_EXTENSION

logger = logging.getLogger("uvicorn.error")


class AuthServiceError(Exception):
    """Authentication service error."""


class AuthService(metaclass=SingletonMeta):
    """Auth service.

    Loads the router and hooks from the configured auth extension when one is
    installed; otherwise falls back to the built-in defaults.
    """

    def __init__(self, ext_name: Optional[str] = EXT_NAME) -> None:
        """Initialize AuthService."""
        # Fall back to the default router/hooks if no extension is loaded.
        if not self._load_extension(ext_name):
            self._router = default_router
            self._auth_hook = default_auth_hook
            self._user_settings_hook = default_user_settings_hook

    @property
    def router(self) -> APIRouter:
        """Get router."""
        return self._router

    @property
    def auth_hook(self) -> Callable[..., Awaitable[None]]:
        """Get general authentication hook."""
        return self._auth_hook

    @property
    def user_settings_hook(self) -> Callable[..., Awaitable[UserSettings]]:
        """Get user settings hook."""
        return self._user_settings_hook

    @staticmethod
    def _is_installed(ext_name: str) -> bool:
        """Check if auth_extension is installed."""
        extension = ExtensionLoader().get_core_entry_point(ext_name) or False
        return extension and ext_name == extension.name  # type: ignore

    @staticmethod
    def _get_entry_mod(ext_name: str) -> ModuleType:
        """Get the module of the given auth_extension.

        Raises
        ------
        AuthServiceError
            If the extension entry point cannot be found.
        """
        extension = ExtensionLoader().get_core_entry_point(ext_name)
        if not extension:
            raise AuthServiceError(f"Extension '{ext_name}' is not installed.")
        return import_module(extension.module)

    def _load_extension(self, ext_name: Optional[str]) -> bool:
        """Load auth extension; return True when one was loaded."""
        if ext_name and self._is_installed(ext_name):
            entry_mod = self._get_entry_mod(ext_name)
            # The extension module is expected to expose these three names.
            self._router = entry_mod.router
            self._auth_hook = entry_mod.auth_hook
            self._user_settings_hook = entry_mod.user_settings_hook
            logger.info("Loaded auth_extension: %s", ext_name)
            return True
        return False
"fmp_api_key", + "api_intrinio_key": "intrinio_api_key", + "api_polygon_key": "polygon_api_key", + "api_key_quandl": "nasdaq_api_key", + "api_tradier_token": "tradier_api_key", + } + V4TOV3 = {v: k for k, v in V3TOV4.items()} + + def __init__( + self, + session: Optional[HubSession] = None, + base_url: Optional[str] = None, + ): + """Initialize Hub service.""" + # pylint: disable=import-outside-toplevel + from openbb_core.provider.utils.helpers import get_requests_session + + self._base_url = base_url or Env().HUB_BACKEND + self._session = session + self._hub_user_settings: Optional[HubUserSettings] = None + self._request_session = get_requests_session() + + @property + def base_url(self) -> str: + """Get base url.""" + return self._base_url + + @property + def session(self) -> Optional[HubSession]: + """Get session.""" + return self._session + + def connect( + self, + email: Optional[str] = None, + password: Optional[str] = None, + pat: Optional[str] = None, + ) -> HubSession: + """Connect to Hub.""" + if email and password: + self._session = self._get_session_from_email_password(email, password) + return self._session + if pat: + self._session = self._get_session_from_platform_token(pat) + return self._session + raise OpenBBError("Please provide 'email' and 'password' or 'pat'") + + def disconnect(self) -> bool: + """Disconnect from Hub.""" + if self._session: + result = self._post_logout(self._session) + self._session = None + return result + raise OpenBBError( + "No session found. Login or provide a 'HubSession' on initialization." + ) + + def push(self, user_settings: UserSettings) -> bool: + """Push user settings to Hub.""" + if self._session: + if user_settings.credentials: + hub_user_settings = self.platform2hub( + user_settings.credentials, user_settings.defaults + ) + return self._put_user_settings(self._session, hub_user_settings) + return False + raise OpenBBError( + "No session found. Login or provide a 'HubSession' on initialization." 
+ ) + + def pull(self) -> UserSettings: + """Pull user settings from Hub.""" + if self._session: + self._hub_user_settings = self._get_user_settings(self._session) + profile = Profile(hub_session=self._session) + credentials, defaults = self.hub2platform(self._hub_user_settings) + return UserSettings( + profile=profile, credentials=credentials, defaults=defaults + ) + raise OpenBBError( + "No session found. Login or provide a 'HubSession' on initialization." + ) + + def _get_session_from_email_password(self, email: str, password: str) -> HubSession: + """Get session from email and password.""" + if not email: + raise OpenBBError("Email not found.") + + if not password: + raise OpenBBError("Password not found.") + + response = self._request_session.post( + url=self._base_url + "/login", + json={ + "email": email, + "password": password, + "remember": True, + }, + timeout=self.TIMEOUT, + ) + + if response.status_code == 200: + session = response.json() + hub_session = HubSession( + access_token=session.get("access_token"), + token_type=session.get("token_type"), + user_uuid=session.get("uuid"), + email=session.get("email"), + username=session.get("username"), + primary_usage=session.get("primary_usage"), + ) + return hub_session + status_code = response.status_code + detail = response.json().get("detail", None) + raise HTTPException(status_code, detail) + + def _get_session_from_platform_token(self, token: str) -> HubSession: + """Get session from Platform personal access token.""" + if not token: + raise OpenBBError("Platform personal access token not found.") + + self._check_token_expiration(token) + + response = self._request_session.post( + url=self._base_url + "/sdk/login", + json={ + "token": token, + }, + timeout=self.TIMEOUT, + ) + + if response.status_code == 200: + session = response.json() + hub_session = HubSession( + access_token=session.get("access_token"), + token_type=session.get("token_type"), + user_uuid=session.get("uuid"), + 
username=session.get("username"), + email=session.get("email"), + primary_usage=session.get("primary_usage"), + ) + return hub_session + status_code = response.status_code + detail = response.json().get("detail", None) + raise HTTPException(status_code, detail) + + def _post_logout(self, session: HubSession) -> bool: + """Post logout.""" + access_token = session.access_token.get_secret_value() + token_type = session.token_type + authorization = f"{token_type.title()} {access_token}" + + response = self._request_session.get( + url=self._base_url + "/logout", + headers={"Authorization": authorization}, + json={"token": access_token}, + timeout=self.TIMEOUT, + ) + + if response.status_code == 200: + result = response.json() + return result.get("success", False) + status_code = response.status_code + result = response.json() + detail = result.get("detail", None) + raise HTTPException(status_code, detail) + + def _get_user_settings(self, session: HubSession) -> HubUserSettings: + """Get user settings.""" + access_token = session.access_token.get_secret_value() + token_type = session.token_type + authorization = f"{token_type.title()} {access_token}" + response = self._request_session.get( + url=self._base_url + "/terminal/user", + headers={"Authorization": authorization}, + timeout=self.TIMEOUT, + ) + + if response.status_code == 200: + user_settings = response.json() + filtered = {k: v for k, v in user_settings.items() if v is not None} + return HubUserSettings.model_validate(filtered) + status_code = response.status_code + detail = response.json().get("detail", None) + raise HTTPException(status_code, detail) + + def _put_user_settings( + self, session: HubSession, settings: HubUserSettings + ) -> bool: + """Put user settings.""" + access_token = session.access_token.get_secret_value() + token_type = session.token_type + authorization = f"{token_type.title()} {access_token}" + response = self._request_session.put( + url=self._base_url + "/user", + 
headers={"Authorization": authorization}, + json=settings.model_dump(exclude_defaults=True), + timeout=self.TIMEOUT, + ) + if response.status_code == 200: + return True + status_code = response.status_code + detail = response.json().get("detail", None) + raise HTTPException(status_code, detail) + + def hub2platform(self, settings: HubUserSettings) -> Tuple[Credentials, Defaults]: + """Convert Hub user settings to Platform models.""" + deprecated = { + k: v for k, v in self.V3TOV4.items() if k in settings.features_keys + } + if deprecated: + msg = "" + for k, v in deprecated.items(): + msg += f"\n'{k.upper()}' -> '{v.upper()}', " + msg = msg.strip(", ") + warn( + message=f"\nDeprecated v3 credentials found.\n{msg}" + "\n\nYou can update them at https://my.openbb.co/app/platform/credentials.", + ) + # We give priority to v4 keys over v3 keys if both are present + hub_credentials = { + self.V3TOV4.get(k, k): settings.features_keys.get(self.V3TOV4.get(k, k), v) + for k, v in settings.features_keys.items() + } + defaults = settings.features_settings.get("defaults", {}) + return Credentials(**hub_credentials), Defaults(**defaults) + + def platform2hub( + self, credentials: Credentials, defaults: Defaults + ) -> HubUserSettings: + """Convert Platform models to Hub user settings.""" + # Dump mode json ensures SecretStr values are serialized as strings + credentials = credentials.model_dump( + mode="json", exclude_none=True, exclude_defaults=True + ) + settings = self._hub_user_settings or HubUserSettings() + for v4_k, v in sorted(credentials.items()): + v3_k = self.V4TOV3.get(v4_k, None) + # If v3 key was in the hub already, we keep it + k = v3_k if v3_k in settings.features_keys else v4_k + settings.features_keys[k] = v + defaults_ = defaults.model_dump( + mode="json", exclude_none=True, exclude_defaults=True + ) + settings.features_settings.update({"defaults": defaults_}) + return settings + + @staticmethod + def _check_token_expiration(token: str) -> None: + """Check 
"""System service."""

import hashlib
import json
from pathlib import Path
from typing import Optional

from openbb_core.app.constants import SYSTEM_SETTINGS_PATH
from openbb_core.app.model.abstract.singleton import SingletonMeta
from openbb_core.app.model.system_settings import SystemSettings


class SystemService(metaclass=SingletonMeta):
    """System service: read, cache and persist system settings."""

    SYSTEM_SETTINGS_PATH = SYSTEM_SETTINGS_PATH
    # Only these fields are read from / written to the settings file.
    SYSTEM_SETTINGS_ALLOWED_FIELD_SET = {
        "log_collect",
        "test_mode",
        "headless",
        "logging_sub_app",
        "api_settings",
        "python_settings",
        "debug_mode",
        "logging_suppress",
    }

    PRO_VALIDATION_HASH = "300ac59fdcc8f899e0bc5c18cda8652220735da1a00e2af365efe9d8e5fe8306"  # pragma: allowlist secret

    def __init__(
        self,
        **kwargs,
    ):
        """Initialize system service."""
        self._system_settings = self._read_from_file(
            path=self.SYSTEM_SETTINGS_PATH, **kwargs
        )

    @classmethod
    def _compare_hash(cls, input_value, existing_hash: Optional[str] = None):
        """Return True if the SHA-256 hex digest of `input_value` equals
        `existing_hash` (defaults to PRO_VALIDATION_HASH)."""
        existing_hash = existing_hash or cls.PRO_VALIDATION_HASH

        hash_object = hashlib.sha256()
        hash_object.update(input_value.encode("utf-8"))
        hashed_input = hash_object.hexdigest()

        return hashed_input == existing_hash

    @classmethod
    def _read_from_file(cls, path: Optional[Path] = None, **kwargs) -> SystemSettings:
        """Read system settings from disk, overlaying `kwargs` on top.

        Fields outside the allowed set are dropped; `logging_sub_app` is only
        honored when it hashes to the pro validation hash.
        """
        path = path or cls.SYSTEM_SETTINGS_PATH

        if path.exists():
            with path.open(mode="r") as file:
                system_settings_json = file.read()

            system_settings_dict = json.loads(system_settings_json)

            # FIX (idiom): iterate a snapshot of the keys via list() instead of
            # a throwaway single-letter shallow copy, so deletion while looping
            # is safe; behavior is unchanged.
            for field in list(system_settings_dict):
                if field not in cls.SYSTEM_SETTINGS_ALLOWED_FIELD_SET:
                    del system_settings_dict[field]
                elif field == "logging_sub_app":
                    if cls._compare_hash(system_settings_dict[field]):
                        system_settings_dict[field] = "pro"
                        # The validated file value wins over a caller override.
                        kwargs.pop(field, None)
                    else:
                        del system_settings_dict[field]

            system_settings_dict.update(kwargs)
            system_settings = SystemSettings.model_validate(system_settings_dict)
        else:
            system_settings = SystemSettings.model_validate(kwargs)

        return system_settings

    @classmethod
    def write_to_file(
        cls,
        system_settings: SystemSettings,
        path: Optional[Path] = None,
    ) -> None:
        """Write system settings to disk (allowed fields only)."""
        path = path or cls.SYSTEM_SETTINGS_PATH

        system_settings_json = system_settings.model_dump_json(
            indent=4,
            include=cls.SYSTEM_SETTINGS_ALLOWED_FIELD_SET,
            exclude_defaults=True,
        )
        with path.open(mode="w") as file:
            file.write(system_settings_json)

    @property
    def system_settings(self) -> SystemSettings:
        """Get system settings."""
        return self._system_settings

    @system_settings.setter
    def system_settings(self, system_settings: SystemSettings) -> None:
        """Set system settings."""
        self._system_settings = system_settings

    def refresh_system_settings(self) -> SystemSettings:
        """Re-read system settings from disk and return them."""
        self._system_settings = self._read_from_file()

        return self._system_settings
"""User service."""

import json
from functools import reduce
from pathlib import Path
from typing import Any, Dict, List, MutableMapping, Optional

from openbb_core.app.constants import USER_SETTINGS_PATH
from openbb_core.app.model.abstract.singleton import SingletonMeta
from openbb_core.app.model.user_settings import UserSettings


class UserService(metaclass=SingletonMeta):
    """User service."""

    USER_SETTINGS_PATH = USER_SETTINGS_PATH
    USER_SETTINGS_ALLOWED_FIELD_SET = {"credentials", "preferences", "defaults"}

    def __init__(
        self,
        default_user_settings: Optional[UserSettings] = None,
    ):
        """Initialize user service with the given settings, or those read from disk."""
        self._default_user_settings = default_user_settings or self.read_from_file()

    @classmethod
    def read_from_file(cls, path: Optional[Path] = None) -> UserSettings:
        """Read user settings from json into UserSettings.

        A missing file yields a default-constructed ``UserSettings``.
        """
        source = path or cls.USER_SETTINGS_PATH
        if not source.exists():
            return UserSettings()
        raw = json.loads(source.read_text(encoding="utf-8"))
        return UserSettings.model_validate(raw)

    @classmethod
    def write_to_file(
        cls,
        user_settings: UserSettings,
        path: Optional[Path] = None,
    ) -> None:
        """Write user settings to json, serializing only the allowed fields."""
        target = path or cls.USER_SETTINGS_PATH
        payload = user_settings.model_dump_json(
            indent=4, include=cls.USER_SETTINGS_ALLOWED_FIELD_SET, exclude_defaults=True
        )
        target.write_text(payload, encoding="utf-8")

    @staticmethod
    def _merge_dicts(list_of_dicts: List[Dict[str, Any]]) -> Dict[str, Any]:
        """Merge a list of dictionaries left to right.

        ``None`` values never overwrite existing entries; mappings present on
        both sides are merged recursively.
        """

        def recursive_merge(base: Dict, incoming: Dict) -> Dict:
            """Recursively merge ``incoming`` into ``base``, skipping None values."""
            for key, value in base.items():
                # When both sides hold a mapping for the same key, merge the
                # nested mappings first (result is written back into incoming).
                if key in incoming and all(
                    isinstance(item, MutableMapping) for item in (value, incoming[key])
                ):
                    incoming[key] = recursive_merge(value, incoming[key])

            merged = base.copy()
            merged.update((k, v) for k, v in incoming.items() if v is not None)
            return merged

        result: Dict[str, Any] = {}
        for entry in list_of_dicts:
            result = recursive_merge(result, entry)
        return result

    @property
    def default_user_settings(self) -> UserSettings:
        """Return default user settings."""
        return self._default_user_settings

    @default_user_settings.setter
    def default_user_settings(self, default_user_settings: UserSettings) -> None:
        """Set default user settings."""
        self._default_user_settings = default_user_settings
"""Account."""

# pylint: disable=W0212:protected-access
import json
from functools import wraps
from pathlib import Path
from sys import exc_info
from typing import TYPE_CHECKING, Optional

from openbb_core.app.logs.logging_service import LoggingService
from openbb_core.app.model.abstract.error import OpenBBError
from openbb_core.app.model.hub.hub_session import HubSession
from openbb_core.app.model.user_settings import UserSettings
from openbb_core.app.service.hub_service import HubService
from openbb_core.app.service.user_service import UserService

if TYPE_CHECKING:
    from openbb_core.app.static.app_factory import BaseApp


class Account:  # noqa: D205, D400
    """/account
    login
    logout
    save
    refresh
    """

    # File inside the OpenBB user directory used to persist the hub session
    # when logging in with remember_me=True.
    SESSION_FILE = ".hub_session.json"

    def __init__(self, base_app: "BaseApp"):
        """Initialize account service."""
        self._base_app = base_app
        self._openbb_directory = (
            base_app._command_runner.system_settings.openbb_directory
        )
        # Set on login; None means "not connected to hub".
        self._hub_service: Optional[HubService] = None

    def __repr__(self) -> str:
        """Human readable representation of the object."""
        return self.__doc__ or ""

    def _log_account_command(func):  # pylint: disable=E0213
        """Log account command.

        Decorator: wraps an account method so every call is logged via
        LoggingService — in the ``finally`` block, so logging happens whether
        the call succeeded or raised. Any exception is re-raised wrapped in
        OpenBBError. kwargs are deliberately logged as {} so credentials never
        reach the log.
        """

        @wraps(func)  # type: ignore[arg-type]
        def wrapped(self, *args, **kwargs):
            try:
                # pylint: disable=E1102
                result = func(self, *args, **kwargs)  # type: ignore[operator]
            except Exception as e:
                raise OpenBBError(e) from e
            finally:
                user_settings = self._base_app._command_runner.user_settings
                system_settings = self._base_app._command_runner.system_settings
                ls = LoggingService(
                    user_settings=user_settings, system_settings=system_settings
                )
                ls.log(
                    user_settings=user_settings,
                    system_settings=system_settings,
                    # pylint: disable=E1101
                    route=f"/account/{func.__name__}",  # type: ignore[attr-defined]
                    func=func,  # type: ignore[arg-type]
                    kwargs={},  # don't want any credentials being logged by accident
                    exec_info=exc_info(),
                )

            return result

        return wrapped

    def _create_hub_service(
        self,
        email: Optional[str] = None,
        password: Optional[str] = None,
        pat: Optional[str] = None,
    ) -> HubService:
        """Create hub service to handle connection.

        With no credentials given, the previously saved session file is
        reused; otherwise a fresh connection is made with email/password
        or a personal access token.
        """
        if email is None and password is None and pat is None:
            session_file = Path(self._openbb_directory, self.SESSION_FILE)
            if not session_file.exists():
                raise OpenBBError("Session not found.")

            with open(session_file) as f:
                session_dict = json.load(f)

            hub_session = HubSession(**session_dict)
            hs = HubService(hub_session)
        else:
            hs = HubService()
            hs.connect(email, password, pat)
        return hs

    @_log_account_command  # type: ignore
    def login(
        self,
        email: Optional[str] = None,
        password: Optional[str] = None,
        pat: Optional[str] = None,
        remember_me: bool = False,
        return_settings: bool = False,
    ) -> Optional[UserSettings]:
        """Login to hub.

        Parameters
        ----------
        email : Optional[str], optional
            Email address, by default None
        password : Optional[str], optional
            Password, by default None
        pat : Optional[str], optional
            Personal access token, by default None
        remember_me : bool, optional
            Remember me, by default False
        return_settings : bool, optional
            Return user settings, by default False

        Returns
        -------
        Optional[UserSettings]
            User settings: profile, credentials, preferences
        """
        self._hub_service = self._create_hub_service(email, password, pat)
        # Pull remote settings and merge them into the local user settings.
        incoming = self._hub_service.pull()
        self._base_app.user.profile = incoming.profile
        self._base_app.user.credentials.update(incoming.credentials)
        self._base_app.user.defaults.update(incoming.defaults)
        if remember_me:
            # Persist the session so future logins can omit credentials.
            Path(self._openbb_directory).mkdir(parents=False, exist_ok=True)
            session_file = Path(self._openbb_directory, self.SESSION_FILE)
            with open(session_file, "w") as f:
                if not self._hub_service.session:
                    raise OpenBBError("Not connected to hub.")

                json.dump(
                    self._hub_service.session.model_dump(mode="json"), f, indent=4
                )

        if return_settings:
            return self._base_app._command_runner.user_settings
        return None

    @_log_account_command  # type: ignore
    def save(self, return_settings: bool = False) -> Optional[UserSettings]:
        """Save user settings.

        Parameters
        ----------
        return_settings : bool, optional
            Return user settings, by default False

        Returns
        -------
        Optional[UserSettings]
            User settings: profile, credentials, preferences
        """
        # Not logged in -> save to the local settings file;
        # logged in -> push the settings to the hub instead.
        if not self._hub_service:
            UserService.write_to_file(self._base_app._command_runner.user_settings)
        else:
            self._hub_service.push(self._base_app._command_runner.user_settings)

        if return_settings:
            return self._base_app._command_runner.user_settings
        return None

    @_log_account_command  # type: ignore
    def refresh(self, return_settings: bool = False) -> Optional[UserSettings]:
        """Refresh user settings.

        Parameters
        ----------
        return_settings : bool, optional
            Return user settings, by default False

        Returns
        -------
        Optional[UserSettings]
            User settings: profile, credentials, preferences
        """
        # Not logged in -> reload from the local file; logged in -> re-pull
        # from the hub and merge, mirroring the login flow.
        if not self._hub_service:
            self._base_app._command_runner.user_settings = UserService.read_from_file()
        else:
            incoming = self._hub_service.pull()
            self._base_app.user.profile = incoming.profile
            self._base_app.user.credentials.update(incoming.credentials)
            self._base_app.user.defaults.update(incoming.defaults)
        if return_settings:
            return self._base_app._command_runner.user_settings
        return None

    @_log_account_command  # type: ignore
    def logout(self, return_settings: bool = False) -> Optional[UserSettings]:
        """Logout from hub.

        Parameters
        ----------
        return_settings : bool, optional
            Return user settings, by default False

        Returns
        -------
        Optional[UserSettings]
            User settings: profile, credentials, preferences
        """
        if not self._hub_service:
            raise OpenBBError("Not connected to hub.")

        self._hub_service.disconnect()

        # Remove any persisted session and fall back to local settings.
        session_file = Path(self._openbb_directory, self.SESSION_FILE)
        if session_file.exists():
            session_file.unlink()

        self._base_app._command_runner.user_settings = UserService.read_from_file()

        if return_settings:
            return self._base_app._command_runner.user_settings
        return None
"""Account menu.""" + return self._account + + @property + def user(self) -> UserSettings: + """User settings.""" + return self._command_runner.user_settings + + @property + def system(self) -> SystemSettings: + """System settings.""" + return self._command_runner.system_settings + + @property + def coverage(self) -> Coverage: + """Coverage menu.""" + return self._coverage + + @property + def reference(self) -> Dict[str, Dict]: + """Return reference data.""" + return self._reference + + +def create_app(extensions: Optional[E] = None) -> Type[BaseApp]: + """Create the app.""" + + class App(BaseApp, extensions or object): # type: ignore[misc] + def __repr__(self) -> str: + # pylint: disable=E1101 + ext_doc = extensions.__doc__ if extensions else "" + return BASE_DOC + (ext_doc or "") + + return App(command_runner=CommandRunner()) diff --git a/openbb_platform/core/openbb_core/app/static/container.py b/openbb_platform/core/openbb_core/app/static/container.py new file mode 100644 index 0000000000000000000000000000000000000000..eac555e4e0f22bcf4389d4cbe535d1295293237a --- /dev/null +++ b/openbb_platform/core/openbb_core/app/static/container.py @@ -0,0 +1,105 @@ +"""Container class.""" + +from typing import TYPE_CHECKING, Any, Optional + +from openbb_core.app.model.abstract.error import OpenBBError + +if TYPE_CHECKING: + from openbb_core.app.command_runner import CommandRunner + + +class Container: + """Container class for the command runner session.""" + + def __init__(self, command_runner: "CommandRunner") -> None: + """Initialize the container.""" + # pylint: disable=import-outside-toplevel + from openbb_core.app.model.obbject import OBBject + + self._command_runner = command_runner + OBBject._user_settings = command_runner.user_settings + OBBject._system_settings = command_runner.system_settings + + def _run(self, *args, **kwargs) -> Any: + """Run a command in the container.""" + endpoint = args[0][1:].replace("/", ".") if args else "" + defaults = 
"""Container class."""

from typing import TYPE_CHECKING, Any, Optional

from openbb_core.app.model.abstract.error import OpenBBError

if TYPE_CHECKING:
    from openbb_core.app.command_runner import CommandRunner


class Container:
    """Container class for the command runner session."""

    def __init__(self, command_runner: "CommandRunner") -> None:
        """Initialize the container."""
        # pylint: disable=import-outside-toplevel
        from openbb_core.app.model.obbject import OBBject

        self._command_runner = command_runner
        # NOTE(review): these are CLASS-level attributes on OBBject, so the
        # most recently created container wins for all OBBject instances —
        # confirm that a single session is the intended model.
        OBBject._user_settings = command_runner.user_settings
        OBBject._system_settings = command_runner.system_settings

    def _run(self, *args, **kwargs) -> Any:
        """Run a command in the container.

        args[0] is the route path (e.g. "/equity/price/historical"); it is
        translated to the dotted command key used by user defaults. Assumes
        kwargs contains "standard_params" and "extra_params" dicts — TODO
        confirm callers always pass both.
        """
        endpoint = args[0][1:].replace("/", ".") if args else ""
        defaults = self._command_runner.user_settings.defaults.commands

        if endpoint and defaults and defaults.get(endpoint):
            # User-configured defaults only fill parameters the caller left
            # unset (None); explicit arguments always win.
            default_params = {
                k: v for k, v in defaults[endpoint].items() if k != "provider"
            }
            for k, v in default_params.items():
                if k == "chart" and v is True:
                    kwargs["chart"] = True
                elif (
                    k in kwargs["standard_params"]
                    and kwargs["standard_params"][k] is None
                ):
                    kwargs["standard_params"][k] = v
                elif (
                    k in kwargs["extra_params"] and kwargs["extra_params"][k] is None
                ) or k not in kwargs["extra_params"]:
                    kwargs["extra_params"][k] = v

        obbject = self._command_runner.sync_run(*args, **kwargs)
        output_type = self._command_runner.user_settings.preferences.output_type
        if output_type == "OBBject":
            return obbject
        # Any other preference dispatches to the matching converter,
        # e.g. "dataframe" -> obbject.to_dataframe().
        return getattr(obbject, "to_" + output_type)()

    def _check_credentials(self, provider: str) -> Optional[bool]:
        """Check required credentials are populated.

        Returns None when the provider is unknown to the credentials model
        (treated elsewhere as "not installed"); otherwise True/False for
        whether every required credential is set.
        """
        credentials = self._command_runner.user_settings.credentials
        if provider not in credentials.origins:
            return None
        required = credentials.origins.get(provider)
        return all(getattr(credentials, r, None) for r in required)

    def _get_provider(
        self, choice: Optional[str], command: str, default_priority: tuple[str, ...]
    ) -> str:
        """Get the provider to use in execution.

        If no choice is specified, the configured priority list is used. A provider is used
        when all of its required credentials are populated.

        Parameters
        ----------
        choice: Optional[str]
            The provider choice, for example 'fmp'.
        command: str
            The command to get the provider for, for example 'equity.price.historical'
        default_priority: Tuple[str, ...]
            A tuple of available providers for the given command to use as default priority list.

        Returns
        -------
        str
            The provider to use in the command.

        Raises
        ------
        OpenBBError
            Raises error when all the providers in the priority list failed.
        """
        if choice is None:
            commands = self._command_runner.user_settings.defaults.commands
            providers = (
                commands.get(command, {}).get("provider", []) or default_priority
            )
            tries = []
            # A single candidate is used unconditionally — no credential check.
            if len(providers) == 1:
                return providers[0]
            for p in providers:
                result = self._check_credentials(p)
                if result:
                    return p
                if result is False:
                    tries.append((p, "missing credentials"))
                else:
                    tries.append((p, f"not installed, please install openbb-{p}"))

            msg = "\n ".join([f"* '{pair[0]}' -> {pair[1]}" for pair in tries])
            raise OpenBBError(f"Provider fallback failed.\n" f"[Providers]\n {msg}")
        return choice
"""Coverage module."""

from typing import TYPE_CHECKING, Any, Dict, List, Optional

from openbb_core.api.router.helpers.coverage_helpers import get_route_schema_map
from openbb_core.app.provider_interface import ProviderInterface
from openbb_core.app.router import CommandMap
from openbb_core.app.static.reference_loader import ReferenceLoader

if TYPE_CHECKING:
    from openbb_core.app.static.app_factory import BaseApp


class Coverage:  # noqa: D205, D400
    """/coverage
    providers
    commands
    command_model
    command_schemas
    reference
    """

    def __init__(self, app: "BaseApp"):
        """Initialize coverage."""
        self._app = app
        # "." separator so coverage keys read like python paths,
        # e.g. "equity.price.historical".
        self._command_map = CommandMap(coverage_sep=".")
        self._provider_interface = ProviderInterface()
        self._reference_loader = ReferenceLoader()

    def __repr__(self) -> str:
        """Return docstring."""
        return self.__doc__ or ""

    @property
    def providers(self) -> Dict[str, List[str]]:
        """Return providers coverage."""
        return self._command_map.provider_coverage

    @property
    def commands(self) -> Dict[str, List[str]]:
        """Return commands coverage."""
        return self._command_map.command_coverage

    @property
    def command_model(self) -> Dict[str, Dict[str, Dict[str, Dict[str, Any]]]]:
        """Return command to model mapping."""
        return {
            command: self._provider_interface.map[value]
            for command, value in self._command_map.commands_model.items()
        }

    @property
    def reference(self) -> Dict[str, Dict]:
        """Return reference data."""
        return self._reference_loader.reference

    def command_schemas(self, filter_by_provider: Optional[str] = None):
        """Return route schema for a command, optionally filtered by provider."""
        return get_route_schema_map(
            self._app, self._command_map.commands_model, filter_by_provider
        )
TAB = "    "


def create_indent(n: int) -> str:
    """Return the indentation string for nesting level ``n`` (n tabs of 4 spaces)."""
    return n * TAB
class PackageBuilder:
    """Build the extension package for the Platform."""

    def __init__(
        self, directory: Optional[Path] = None, lint: bool = True, verbose: bool = False
    ) -> None:
        """Initialize the package builder."""
        self.directory = directory or Path(__file__).parent
        self.lint = lint
        self.verbose = verbose
        self.console = Console(verbose)
        self.route_map = PathHandler.build_route_map()
        self.path_list = PathHandler.build_path_list(route_map=self.route_map)

    def auto_build(self) -> None:
        """Trigger build if there are differences between built and installed extensions."""
        if Env().AUTO_BUILD:
            # Compare the extensions recorded at the last build against the
            # currently installed entry points.
            reference = PackageBuilder._read(
                self.directory / "assets" / "reference.json"
            )
            ext_map = reference.get("info", {}).get("extensions", {})
            add, remove = PackageBuilder._diff(ext_map)
            if add:
                a = ", ".join(sorted(add))
                print(f"Extensions to add: {a}")  # noqa: T201

            if remove:
                r = ", ".join(sorted(remove))
                print(f"Extensions to remove: {r}")  # noqa: T201

            if add or remove:
                print("\nBuilding...")  # noqa: T201
                self.build()

    def build(
        self,
        modules: Optional[Union[str, List[str]]] = None,
    ) -> None:
        """Build the extensions for the Platform."""
        self.console.log("\nBuilding extensions package...\n")
        self._clean(modules)
        ext_map = self._get_extension_map()
        self._save_modules(modules, ext_map)
        self._save_package()
        self._save_reference_file(ext_map)
        if self.lint:
            self._run_linters()

    def _clean(self, modules: Optional[Union[str, List[str]]] = None) -> None:
        """Delete the assets and package folder or modules before building."""
        shutil.rmtree(self.directory / "assets", ignore_errors=True)
        if modules:
            # Partial rebuild: only remove the named modules.
            for module in modules:
                module_path = self.directory / "package" / f"{module}.py"
                if module_path.exists():
                    module_path.unlink()
        else:
            shutil.rmtree(self.directory / "package", ignore_errors=True)

    def _get_extension_map(self) -> Dict[str, List[str]]:
        """Get map of extensions available at build time.

        Maps each OpenBB entry-point group to "name@version" strings.
        Assumes OpenBBGroups.groups() and ExtensionLoader.entry_points are
        parallel sequences — TODO confirm the ordering contract.
        """
        el = ExtensionLoader()
        og = OpenBBGroups.groups()
        ext_map: Dict[str, List[str]] = {}

        for group, entry_point in zip(og, el.entry_points):
            ext_map[group] = [
                f"{e.name}@{getattr(e.dist, 'version', '')}" for e in entry_point
            ]
        return ext_map

    def _save_modules(
        self,
        modules: Optional[Union[str, List[str]]] = None,
        ext_map: Optional[Dict[str, List[str]]] = None,
    ):
        """Save the modules."""
        self.console.log("\nWriting modules...")

        if not self.path_list:
            self.console.log("\nThere is nothing to write.")
            return

        # Longest path, used only to align console log output.
        MAX_LEN = max([len(path) for path in self.path_list if path != "/"])

        _path_list = (
            [path for path in self.path_list if path in modules]
            if modules
            else self.path_list
        )

        for path in _path_list:
            route = PathHandler.get_route(path, self.route_map)
            # Only container paths (those without a terminal route) become
            # generated modules; leaf routes are methods inside them.
            if route is None:
                code = ModuleBuilder.build(path, ext_map)
                name = PathHandler.build_module_name(path)
                self.console.log(f"({path})", end=" " * (MAX_LEN - len(path)))
                self._write(code, name)

    def _save_package(self):
        """Save the package."""
        self.console.log("\nWriting package __init__...")
        code = "### THIS FILE IS AUTO-GENERATED. DO NOT EDIT. ###\n"
        self._write(code=code, name="__init__")

    def _save_reference_file(self, ext_map: Optional[Dict[str, List[str]]] = None):
        """Save the reference.json file."""
        self.console.log("\nWriting reference file...")
        code = dumps(
            obj={
                "openbb": VERSION.replace("dev", ""),
                "info": {
                    "title": "OpenBB Platform (Python)",
                    "description": "Investment research for everyone, anywhere.",
                    "core": CORE_VERSION.replace("dev", ""),
                    "extensions": ext_map,
                },
                "paths": ReferenceGenerator.get_paths(self.route_map),
                "routers": ReferenceGenerator.get_routers(self.route_map),
            },
            indent=4,
        )
        self._write(code=code, name="reference", extension="json", folder="assets")

    def _run_linters(self):
        """Run the linters."""
        self.console.log("\nRunning linters...")
        linters = Linters(self.directory / "package", self.verbose)
        linters.ruff()
        linters.black()

    def _write(
        self, code: str, name: str, extension: str = "py", folder: str = "package"
    ) -> None:
        """Write the module to the package."""
        package_folder = self.directory / folder
        package_path = package_folder / f"{name}.{extension}"

        package_folder.mkdir(exist_ok=True)

        self.console.log(str(package_path))
        with package_path.open("w", encoding="utf-8", newline="\n") as file:
            # NOTE(review): blanket textual replace — it rewrites every
            # occurrence of "typing." and "List", including inside strings or
            # identifiers that merely contain "List"; confirm this is safe for
            # all generated code.
            file.write(code.replace("typing.", "").replace("List", "list"))

    @staticmethod
    def _read(path: Path) -> dict:
        """Get content from folder."""
        try:
            with open(Path(path)) as fp:
                content = load(fp)
        except Exception:
            # Deliberate best-effort: a missing or malformed file yields {}.
            content = {}

        return content

    @staticmethod
    def _diff(ext_map: Dict[str, List[str]]) -> Tuple[Set[str], Set[str]]:
        """Check differences between built and installed extensions.

        Parameters
        ----------
        ext_map: Dict[str, List[str]]
            Dictionary containing the extensions.
            Example:
            {
                "openbb_core_extension": [
                    "commodity@1.0.1",
                    ...
                ],
                "openbb_provider_extension": [
                    "benzinga@1.1.3",
                    ...
                ],
                "openbb_obbject_extension": [
                    "openbb_charting@1.0.0",
                    ...
                ]
            }

        Returns
        -------
        Tuple[Set[str], Set[str]]
            First element: set of installed extensions that are not in the package.
            Second element: set of extensions in the package that are not installed.
        """
        add: Set[str] = set()
        remove: Set[str] = set()
        groups = OpenBBGroups.groups()

        for g in groups:
            built = set(ext_map.get(g, {}))
            installed = set(
                f"{e.name}@{getattr(e.dist, 'version', '')}"
                for e in entry_points(group=g)
            )
            add = add.union(installed - built)
            remove = remove.union(built - installed)

        return add, remove


class ModuleBuilder:
    """Build the module for the Platform."""

    @staticmethod
    def build(path: str, ext_map: Optional[Dict[str, List[str]]] = None) -> str:
        """Build the module: header banner, then imports, then the class body."""
        code = "### THIS FILE IS AUTO-GENERATED. DO NOT EDIT. ###\n\n"
        code += ImportDefinition.build(path)
        code += ClassDefinition.build(path, ext_map)

        return code
###\n\n" + code += ImportDefinition.build(path) + code += ClassDefinition.build(path, ext_map) + + return code + + +class ImportDefinition: + """Build the import definition for the Platform.""" + + @staticmethod + def filter_hint_type_list(hint_type_list: List[Type]) -> List[Type]: + """Filter the hint type list.""" + new_hint_type_list = [] + primitive_types = {int, float, str, bool, list, dict, tuple, set} + + for hint_type in hint_type_list: + # Skip primitive types and empty types + if hint_type == _empty or hint_type in primitive_types: + continue + + # Only include types that have a module and are not builtins + if ( + hasattr(hint_type, "__module__") and hint_type.__module__ != "builtins" + ) or (isinstance(hint_type, str)): + new_hint_type_list.append(hint_type) + + new_hint_type_list = list(set(new_hint_type_list)) + return new_hint_type_list + + @classmethod + def get_function_hint_type_list(cls, route) -> List[Type]: + """Get the hint type list from the function.""" + + no_validate = getattr(route, "openapi_extra", {}).get("no_validate") + + func = route.endpoint + sig = signature(func) + if no_validate is True: + route.response_model = None + + parameter_map = sig.parameters + return_type = sig.return_annotation if not no_validate else route.response_model + + hint_type_list: list = [] + + for parameter in parameter_map.values(): + hint_type_list.append(parameter.annotation) + + # Extract dependencies from Annotated metadata + if isinstance(parameter.annotation, _AnnotatedAlias): + for meta in parameter.annotation.__metadata__: + # Check if this is a Depends object + if hasattr(meta, "dependency"): + # Add the dependency function to hint_type_list + hint_type_list.append(meta.dependency) + + if return_type: + hint_type = ( + get_args(get_type_hints(return_type)["results"])[0] + if hasattr(return_type, "__class__") + and hasattr(return_type.__class__, "__name__") + and "OBBject" in return_type.__class__.__name__ + else return_type + ) + 
hint_type_list.append(hint_type) + + hint_type_list = cls.filter_hint_type_list(hint_type_list) + + return hint_type_list + + @classmethod + def get_path_hint_type_list(cls, path: str) -> List[Type]: + """Get the hint type list from the path.""" + route_map = PathHandler.build_route_map() + path_list = PathHandler.build_path_list(route_map=route_map) + child_path_list = PathHandler.get_child_path_list( + path=path, path_list=path_list + ) + hint_type_list = [] + for child_path in child_path_list: + route = PathHandler.get_route(path=child_path, route_map=route_map) + if route: + if route.deprecated: + hint_type_list.append(type(route.summary.metadata)) + function_hint_type_list = cls.get_function_hint_type_list(route=route) # type: ignore + hint_type_list.extend(function_hint_type_list) + + hint_type_list = [ + d + for d in list(set(hint_type_list)) + if d not in [int, list, str, dict, float, set] + ] + return hint_type_list + + @classmethod + def build(cls, path: str) -> str: + """Build the import definition.""" + hint_type_list = cls.get_path_hint_type_list(path=path) + code = "from openbb_core.app.static.container import Container" + code += "\nfrom openbb_core.app.model.obbject import OBBject" + + # These imports were not detected before build, so we add them manually and + # ruff --fix the resulting code to remove unused imports. + # TODO: Find a better way to handle this. This is a temporary solution. 
+ code += "\nimport openbb_core.provider" + code += "\nfrom openbb_core.provider.abstract.data import Data" + code += "\nimport pandas" + code += "\nfrom pandas import DataFrame, Series" + code += "\nimport numpy" + code += "\nfrom numpy import ndarray" + code += "\nimport datetime" + code += "\nfrom datetime import date" + code += "\nimport pydantic" + code += "\nfrom pydantic import BaseModel" + code += "\nfrom inspect import Parameter" + code += "\nimport typing" + code += "\nfrom typing import TYPE_CHECKING, ForwardRef, Union, Optional, Literal, Any" + code += "\nfrom annotated_types import Ge, Le, Gt, Lt" + code += "\nfrom warnings import warn, simplefilter" + code += "\nfrom typing_extensions import Annotated, deprecated" + code += "\nfrom openbb_core.app.static.utils.decorators import exception_handler, validate\n" + code += "\nfrom openbb_core.app.static.utils.filters import filter_inputs\n" + code += "\nfrom openbb_core.app.deprecation import OpenBBDeprecationWarning\n" + code += "\nfrom openbb_core.app.model.field import OpenBBField" + code += "\nfrom fastapi import Depends" + + module_list = [ + hint_type.__module__ if hasattr(hint_type, "__module__") else hint_type + for hint_type in hint_type_list + ] + module_list = list(set(module_list)) + module_list.sort() + + code += "\n" + for module in module_list: + code += f"import {module}\n" + + # Group types by module and capture the return types for the imports. + module_types: dict = {} + for hint_type in hint_type_list: + if hasattr(hint_type, "__module__") and hint_type.__module__ != "builtins": + module = hint_type.__module__ + + # Extract only the base type name without generic parameters + if hasattr(hint_type, "__origin__"): + # This is a generic type like List[...] or Dict[...] 
+ type_name = ( + hint_type.__origin__.__name__ + if hasattr(hint_type.__origin__, "__name__") + else str(hint_type.__origin__) + ) + else: + # Extract the base name before any square brackets + raw_type_name = getattr( + hint_type, + "__name__", + str(hint_type).rsplit(".", maxsplit=1)[-1], + ) + type_name = ( + raw_type_name.split("[")[0] + if "[" in raw_type_name + else raw_type_name + ) + + # Skip built-in types when adding to typing module + if ( + module == "typing" and type_name in dir(__builtins__) + ) or type_name in [ + "Dict", + "List", + "int", + int, + "float", + float, + str, + "str", + ]: + continue + + if module not in module_types: + module_types[module] = set() + module_types[module].add(type_name) + + # Generate from-import statements for modules with specific types + for module, types in sorted(module_types.items()): + if len(types) == 1: + type_name = next(iter(types)) + code += f"\nfrom {module} import {type_name}" + else: + import_types = [ + d + for d in sorted(types) + if d + not in [ + "Dict", + "List", + "int", + "float", + "str", + "dict", + "list", + "set", + ] + ] + if import_types: + code += f"\nfrom {module} import (" + for type_name in import_types: + code += f"\n {type_name}," + code += "\n)" + code += "\n" + + return code + "\n" + + +class ClassDefinition: + """Build the class definition for the Platform.""" + + @staticmethod + def build(path: str, ext_map: Optional[Dict[str, List[str]]] = None) -> str: + """Build the class definition.""" + class_name = PathHandler.build_module_class(path=path) + code = f"class {class_name}(Container):\n" + + route_map = PathHandler.build_route_map() + path_list = PathHandler.build_path_list(route_map) + child_path_list = sorted( + PathHandler.get_child_path_list( + path=path, + path_list=path_list, + ) + ) + + doc = f' """{path}\n' if path else ' # fmt: off\n """\nRouters:\n' + methods = "" + for c in child_path_list: + route = PathHandler.get_route(c, route_map) + if route: + doc += f" 
{route.name}\n" + methods += MethodDefinition.build_command_method( + path=route.path, + func=route.endpoint, + model_name=( + route.openapi_extra.get("model", None) + if route.openapi_extra + else None + ), + examples=( + route.openapi_extra.get("examples", []) + if route.openapi_extra + else [] + ), + ) + else: + doc += " /" if path else " /" + doc += c.split("/")[-1] + "\n" + methods += MethodDefinition.build_class_loader_method(path=c) + + if not path: + if ext_map: + doc += "\n" + doc += "Extensions:\n" + doc += "\n".join( + [f" - {ext}" for ext in ext_map.get("openbb_core_extension", [])] + ) + doc += "\n\n" + doc += "\n".join( + [ + f" - {ext}" + for ext in ext_map.get("openbb_provider_extension", []) + ] + ) + doc += ' """\n' + doc += " # fmt: on\n" + else: + doc += ' """\n' + + code += doc + "\n" + code += " def __repr__(self) -> str:\n" + code += ' return self.__doc__ or ""\n' + code += methods + + return code + + +class MethodDefinition: + """Build the method definition for the Platform.""" + + # These are types we want to expand. + # For example, start_date is always a 'date', but we also accept 'str' as input. + # Be careful, if the type is not coercible by pydantic to the original type, you + # will need to add some conversion code in the input filter. + TYPE_EXPANSION = { + "data": DataProcessingSupportedTypes, + "start_date": str, + "end_date": str, + "date": str, + "provider": None, + } + + @staticmethod + def build_class_loader_method(path: str) -> str: + """Build the class loader method.""" + module_name = PathHandler.build_module_name(path=path) + class_name = PathHandler.build_module_class(path=path) + function_name = path.rsplit("/", maxsplit=1)[-1].strip("/") + + code = "\n @property\n" + code += f" def {function_name}(self):\n" + code += " # pylint: disable=import-outside-toplevel\n" + code += f" from . 
import {module_name}\n\n" + code += f" return {module_name}.{class_name}(command_runner=self._command_runner)\n" + + return code + + @staticmethod + def get_type(field: FieldInfo) -> type: + """Get the type of the field.""" + field_type = getattr( + field, "annotation", getattr(field, "type", Parameter.empty) + ) + if isclass(field_type): + name = field_type.__name__ + if name.startswith("Constrained") and name.endswith("Value"): + name = name[11:-5].lower() + return getattr(builtins, name, field_type) + return field_type + return field_type + + @staticmethod + def get_default(field: FieldInfo): + """Get the default value of the field.""" + # First check if field has a default attribute at all + if not hasattr(field, "default"): + return Parameter.empty + + # Check for Ellipsis directly in field.default + if field.default is Ellipsis: + return None + + if hasattr(field, "default") and hasattr(field.default, "default"): + default_val = field.default.default + if default_val is PydanticUndefined: + return Parameter.empty + if default_val is Ellipsis: + return None + return default_val + return field.default + + @staticmethod + def get_extra(field: FieldInfo) -> dict: + """Get json schema extra.""" + field_default = getattr(field, "default", None) + if field_default: + # Getting json_schema_extra without changing the original dict + json_schema_extra = getattr(field_default, "json_schema_extra", {}).copy() + json_schema_extra.pop("choices", None) + return json_schema_extra + return {} + + @staticmethod + def is_annotated_dc(annotation) -> bool: + """Check if the annotation is an annotated dataclass.""" + return isinstance(annotation, _AnnotatedAlias) and hasattr( + annotation.__args__[0], "__dataclass_fields__" + ) + + @staticmethod + def is_data_processing_function(path: str) -> bool: + """Check if the function is a data processing function.""" + methods = PathHandler.build_route_map()[path].methods # type: ignore + return "POST" in methods or "PUT" in methods or 
"PATCH" in methods + + @staticmethod + def is_deprecated_function(path: str) -> bool: + """Check if the function is deprecated.""" + return getattr(PathHandler.build_route_map()[path], "deprecated", False) + + @staticmethod + def get_deprecation_message(path: str) -> str: + """Get the deprecation message.""" + return getattr(PathHandler.build_route_map()[path], "summary", "") + + @staticmethod + def reorder_params( + params: Dict[str, Parameter], + var_kw: Optional[List[str]] = None, + for_docstring: bool = False, + ) -> "OrderedDict[str, Parameter]": + """Reorder the params based on context. + + For function signatures: provider is placed last (before VAR_KEYWORD) + For docstrings: provider is placed first + """ + formatted_keys = list(params.keys()) + + if for_docstring and "provider" in formatted_keys: + # For docstrings: Place "provider" first + formatted_keys.remove("provider") + formatted_keys.insert(0, "provider") + else: + # For function signatures: Place "provider" and VAR_KEYWORD at the end + for k in ["provider"] + (var_kw or []): + if k in formatted_keys: + formatted_keys.remove(k) + formatted_keys.append(k) + + od: OrderedDict[str, Parameter] = OrderedDict() + for k in formatted_keys: + od[k] = params[k] + + return od + + @staticmethod + def format_params( + path: str, parameter_map: Dict[str, Parameter] + ) -> OrderedDict[str, Parameter]: + """Format the params.""" + + parameter_map.pop("cc", None) + # we need to add the chart parameter here bc of the docstring generation + if CHARTING_INSTALLED and path.replace("/", "_")[1:] in Charting.functions(): + parameter_map["chart"] = Parameter( + name="chart", + kind=Parameter.POSITIONAL_OR_KEYWORD, + annotation=Annotated[ + bool, + Query( + description="Whether to create a chart or not, by default False.", + ), + ], + default=False, + ) + + formatted: Dict[str, Parameter] = {} + var_kw = [] + for name, param in parameter_map.items(): + # Case 1: Handle Query objects inside Annotated + if 
isinstance(param.annotation, _AnnotatedAlias): + query_obj = None + # Look for Query object in the metadata + for meta in param.annotation.__metadata__: + if ( + hasattr(meta, "__class__") + and "Query" in meta.__class__.__name__ + ): + query_obj = meta + break + if query_obj: + description = getattr(query_obj, "description", "") or "" + default_value = getattr(query_obj, "default", Parameter.empty) + if default_value is PydanticUndefined: + default_value = Parameter.empty + + # Create a new annotation with OpenBBField containing the description + formatted[name] = Parameter( + name=name, + kind=param.kind, + annotation=Annotated[ + param.annotation.__args__[0], # Get the original type + OpenBBField( + description=description, + ), + ], + default=param.default, + ) + continue + + # Case 2: Handle Query objects as default values + if ( + hasattr(param.default, "__class__") + and "Query" in param.default.__class__.__name__ + ): + query_obj = param.default + description = getattr(query_obj, "description", "") or "" + default_value = getattr(query_obj, "default", "") + formatted[name] = Parameter( + name=name, + kind=param.kind, + annotation=Annotated[ + param.annotation, + OpenBBField( + description=description, + ), + ], + default=( + Parameter.empty + if default_value is PydanticUndefined + or default_value is Ellipsis + else default_value + ), + ) + continue + + if name == "extra_params": + formatted[name] = Parameter(name="kwargs", kind=Parameter.VAR_KEYWORD) + var_kw.append(name) + elif name == "provider_choices": + if param.annotation != Parameter.empty and hasattr( + param.annotation, "__args__" + ): + fields = param.annotation.__args__[0].__dataclass_fields__ + field = fields["provider"] + else: + continue + type_ = getattr(field, "type") + default_priority = getattr(type_, "__args__") + formatted["provider"] = Parameter( + name="provider", + kind=Parameter.POSITIONAL_OR_KEYWORD, + annotation=Annotated[ + Optional[MethodDefinition.get_type(field)], + 
OpenBBField( + description=( + "The provider to use, by default None. " + "If None, the priority list configured in the settings is used. " + f"Default priority: {', '.join(default_priority)}." + ), + ), + ], + default=None, + ) + + elif MethodDefinition.is_annotated_dc(param.annotation): + fields = param.annotation.__args__[0].__dataclass_fields__ + for field_name, field in fields.items(): + type_ = MethodDefinition.get_type(field) + default = MethodDefinition.get_default(field) + extra = MethodDefinition.get_extra(field) + new_type = MethodDefinition.get_expanded_type( + field_name, extra, type_ + ) + updated_type = type_ if new_type is ... else Union[type_, new_type] + + formatted[field_name] = Parameter( + name=field_name, + kind=Parameter.POSITIONAL_OR_KEYWORD, + annotation=updated_type, + default=default, + ) + + if isinstance(param.annotation, _AnnotatedAlias): + # Specifically look for Depends dependency rather than any annotation + has_depends = any( + hasattr(meta, "dependency") + for meta in param.annotation.__metadata__ + ) + if has_depends: + continue + + # If not a dependency, process it as a normal parameter + new_type = MethodDefinition.get_expanded_type(name) + updated_type = ( + param.annotation + if new_type is ... + else Union[param.annotation, new_type] + ) + + metadata = getattr(param.annotation, "__metadata__", []) + description = ( + getattr(metadata[0], "description", "") if metadata else "" + ) + + formatted[name] = Parameter( + name=name, + kind=param.kind, + annotation=Annotated[ + updated_type, + OpenBBField( + description=description, + ), + ], + default=MethodDefinition.get_default(param), + ) + + else: + new_type = MethodDefinition.get_expanded_type(name) + if hasattr(new_type, "__constraints__"): + types = new_type.__constraints__ + (param.annotation,) # type: ignore + updated_type = Union[types] # type: ignore + else: + updated_type = ( + param.annotation + if new_type is ... 
+ else Union[param.annotation, new_type] + ) + + metadata = getattr(param.annotation, "__metadata__", []) + description = ( + getattr(metadata[0], "description", "") if metadata else "" + ) + + formatted[name] = Parameter( + name=name, + kind=param.kind, + annotation=Annotated[ + updated_type, + OpenBBField( + description=description, + ), + ], + default=MethodDefinition.get_default(param), + ) + if param.kind == Parameter.VAR_KEYWORD: + var_kw.append(name) + + required_params = OrderedDict() + optional_params = OrderedDict() + + for name, param in formatted.items(): + if param.default == Parameter.empty: + required_params[name] = param + else: + optional_params[name] = param + + # Combine them in the correct order + ordered_params = OrderedDict( + list(required_params.items()) + list(optional_params.items()) + ) + + return MethodDefinition.reorder_params(params=ordered_params, var_kw=var_kw) + + @staticmethod + def add_field_custom_annotations( + od: OrderedDict[str, Parameter], model_name: Optional[str] = None + ): + """Add the field custom description and choices to the param signature as annotations.""" + if not model_name: + return + + provider_interface = ProviderInterface() + + # Get fields from standard model + try: + available_fields = provider_interface.params[model_name][ + "standard" + ].__dataclass_fields__ + extra_fields = provider_interface.params[model_name][ + "extra" + ].__dataclass_fields__ + except (KeyError, AttributeError): + return + + # Combined fields + all_fields: dict = {} + all_fields.update(available_fields) + all_fields.update(extra_fields) + + for param, value in od.items(): + if param not in all_fields: + continue + + field_default = all_fields[param].default + extra = MethodDefinition.get_extra(all_fields[param]) + choices = getattr(all_fields[param], "json_schema_extra", {}).get( + "choices", [] + ) or extra.get("choices", []) + description = getattr(field_default, "description", "") + + # Handle provider-specific choices and add 
them to the description + provider_specific: dict = {} + for provider, provider_info in extra.items(): + if isinstance(provider_info, dict) and "choices" in provider_info: + provider_specific[provider] = provider_info["choices"] + + # Add provider-specific choices to description + if provider_specific: + # Add each provider's choices on a new line + for provider, provider_choices in provider_specific.items(): + if provider_choices: + choices_str = ", ".join(f"'{c}'" for c in provider_choices) + description += f"\nChoices for {provider}: {choices_str}" + + # Handle multiple_items_allowed + multiple_items_providers: list = [] + for provider, provider_info in extra.items(): + if ( + isinstance(provider_info, dict) + and provider_info.get("multiple_items_allowed") + or ( + isinstance(provider_info, list) + and "multiple_items_allowed" in provider_info + ) + ): + multiple_items_providers.append(provider) + + if ( + multiple_items_providers + and "Multiple comma separated items allowed for provider(s)" + not in description + ): + description += f"\nMultiple items supported by: {', '.join(multiple_items_providers)}" + + # Process the field type - if it's a Union of many Literals, simplify to base type + field_type = all_fields[param].type + simplified_type = field_type + + # If there are provider-specific choices, try to simplify the type + if ( + provider_specific + and hasattr(field_type, "__origin__") + and field_type.__origin__ is Union + ): + # Check if all union members are Literals + all_literals = True + for arg in field_type.__args__: + if not (hasattr(arg, "__origin__") and arg.__origin__ is Literal): + all_literals = False + break + + if all_literals: + # Find the base type of the literals (usually str or int) + literal_types = set() + for arg in field_type.__args__: + for lit_val in arg.__args__: + literal_types.add(type(lit_val)) + + # If all literals are of the same type, use that type + if len(literal_types) == 1: + simplified_type = 
next(iter(literal_types)) + + # Create field with enhanced description and possibly simplified type + field_kwargs = { + "description": description, + } + + if choices: + field_kwargs["choices"] = choices + + new_value = value.replace( + annotation=Annotated[ + ( + simplified_type + if simplified_type != field_type + else value.annotation + ), + OpenBBField(description=description), + ], + ) + + od[param] = new_value + + @staticmethod + def build_func_params(formatted_params: OrderedDict[str, Parameter]) -> str: + """Stringify function params.""" + func_params = ",\n ".join( + str(param) for param in formatted_params.values() + ) + func_params = func_params.replace("NoneType", "None") + func_params = func_params.replace( + "pandas.core.frame.DataFrame", "pandas.DataFrame" + ) + func_params = func_params.replace( + "openbb_core.provider.abstract.data.Data", "Data" + ) + func_params = func_params.replace("ForwardRef('Data')", "Data") + func_params = func_params.replace("ForwardRef('DataFrame')", "DataFrame") + func_params = func_params.replace("ForwardRef('Series')", "Series") + func_params = func_params.replace("ForwardRef('ndarray')", "ndarray") + func_params = func_params.replace("Dict", "dict").replace("List", "list") + return func_params + + @staticmethod + def build_func_returns(return_type: type) -> str: + """Build the function returns.""" + if return_type == _empty: + func_returns = "None" + elif isinstance(return_type, str): + func_returns = f"ForwardRef('{return_type}')" + elif isclass(return_type) and issubclass(return_type, OBBject): + func_returns = "OBBject" + else: + func_returns = return_type.__name__ if return_type else Any # type: ignore + + return func_returns # type: ignore + + @staticmethod + def build_command_method_signature( + func_name: str, + formatted_params: OrderedDict[str, Parameter], + return_type: type, + path: str, + model_name: Optional[str] = None, + ) -> str: + """Build the command method signature.""" + + 
MethodDefinition.add_field_custom_annotations( + od=formatted_params, model_name=model_name + ) # this modified `od` in place + func_params = MethodDefinition.build_func_params(formatted_params) + func_returns = MethodDefinition.build_func_returns(return_type) + + args = ( + "(config=dict(arbitrary_types_allowed=True))" + if "DataFrame" in func_params + or "Series" in func_params + or "ndarray" in func_params + else "" + ) + + code = "" + deprecated = "" + + if MethodDefinition.is_deprecated_function(path): + deprecation_message = MethodDefinition.get_deprecation_message(path) + deprecation_type_class = type( + deprecation_message.metadata # type: ignore + ).__name__ + + deprecated = "\n @deprecated(" + deprecated += f'\n "{deprecation_message}",' + deprecated += f"\n category={deprecation_type_class}," + deprecated += "\n )" + + code += "\n @exception_handler" + code += f"\n @validate{args}" + code += deprecated + code += f"\n def {func_name}(" + code += f"\n self,\n {func_params}\n ) -> {func_returns}:\n" + + return code + + @staticmethod + def build_command_method_doc( + path: str, + func: Callable, + formatted_params: OrderedDict[str, Parameter], + model_name: Optional[str] = None, + examples: Optional[List[Example]] = None, + ): + """Build the command method docstring.""" + doc = func.__doc__ + doc = DocstringGenerator.generate( + path=path, + func=func, + formatted_params=formatted_params, + model_name=model_name, + examples=examples, + ) + code = ( + f'{create_indent(2)}"""{doc}{create_indent(2)}""" # noqa: E501\n\n' + if doc + else "" + ) + + return code + + @staticmethod + def build_command_method_body( + path: str, + func: Callable, + formatted_params: Optional[OrderedDict[str, Parameter]] = None, + ): + """Build the command method implementation.""" + if formatted_params is None: + formatted_params = OrderedDict() + + sig = signature(func) + parameter_map = dict(sig.parameters) + parameter_map.pop("cc", None) + + # Extract dependencies without disrupting 
other code paths + dependency_calls: list = [] + dependency_names = set() + + # Process dependencies + for name, param in parameter_map.items(): + if isinstance(param.annotation, _AnnotatedAlias): + for meta in param.annotation.__metadata__: + if hasattr(meta, "dependency") and meta.dependency is not None: + dependency_func = meta.dependency + func_name = dependency_func.__name__ + dependency_calls.append(f" {name} = {func_name}()") + dependency_names.add(name) + + code = "" + + if dependency_calls: + code += "\n".join(dependency_calls) + "\n\n" + + if CHARTING_INSTALLED and path.replace("/", "_")[1:] in Charting.functions(): + parameter_map["chart"] = Parameter( + name="chart", + kind=Parameter.POSITIONAL_OR_KEYWORD, + annotation=bool, + default=False, + ) + + if MethodDefinition.is_deprecated_function(path): + deprecation_message = MethodDefinition.get_deprecation_message(path) + code += " simplefilter('always', DeprecationWarning)\n" + code += f""" warn("{deprecation_message}", category=DeprecationWarning, stacklevel=2)\n\n""" + + info = {} + + code += " return self._run(\n" + code += f""" "{path}",\n""" + code += " **filter_inputs(\n" + + # Check if we already have a kwargs parameter (VAR_KEYWORD) in formatted_params + has_kwargs = any( + param.kind == Parameter.VAR_KEYWORD for param in formatted_params.values() + ) + has_extra_params = False + + for name, param in parameter_map.items(): + if name == "extra_params": + has_extra_params = True + fields = ( + param.annotation.__args__[0].__dataclass_fields__ + if hasattr(param.annotation, "__args__") + else param.annotation + ) + values = {k: k for k in fields} + for k in values: + if extra := MethodDefinition.get_extra(fields[k]): + info[k] = extra + code += f" {name}=kwargs,\n" + elif name == "provider_choices": + field = param.annotation.__args__[0].__dataclass_fields__["provider"] + available = field.type.__args__ + cmd = path.strip("/").replace("/", ".") + code += " provider_choices={\n" + code += ' 
"provider": self._get_provider(\n' + code += " provider,\n" + code += f' "{cmd}",\n' + code += f" {available},\n" + code += " )\n" + code += " },\n" + elif MethodDefinition.is_annotated_dc(param.annotation): + fields = param.annotation.__args__[0].__dataclass_fields__ + values = {k: k for k in fields} + code += f" {name}={{\n" + for k, v in values.items(): + code += f' "{k}": {v},\n' + if extra := MethodDefinition.get_extra(fields[k]): + info[k] = extra + code += " },\n" + else: + code += f" {name}={name},\n" + + if info: + code += f" info={info},\n" + + if MethodDefinition.is_data_processing_function(path): + code += " data_processing=True,\n" + + # Add kwargs parameter + if has_kwargs and not has_extra_params: + code += " **kwargs,\n" + + code += " )\n" + code += " )\n" + + return code + + @classmethod + def get_expanded_type( + cls, + field_name: str, + extra: Optional[dict] = None, + original_type: Optional[type] = None, + ) -> object: + """Expand the original field type.""" + if extra and any( + ( + v.get("multiple_items_allowed") + if isinstance(v, dict) + # For backwards compatibility, before this was a list + else "multiple_items_allowed" in v + ) + for v in extra.values() + ): + if original_type is None: + raise ValueError( + "multiple_items_allowed requires the original type to be specified." + ) + return List[original_type] # type: ignore + return cls.TYPE_EXPANSION.get(field_name, ...) 
+ + @classmethod + def build_command_method( + cls, + path: str, + func: Callable, + model_name: Optional[str] = None, + examples: Optional[List[Example]] = None, + ) -> str: + """Build the command method.""" + func_name = func.__name__ + + sig = signature(func) + parameter_map = dict(sig.parameters) + + # Get the function source code and extract filter_inputs parameters + additional_params = {} + if hasattr(func, "__code__"): + try: + func_source = inspect.getsource(func) + + # First, find the filter_inputs block to extract parameter names + filter_inputs_match = re.search( + r"filter_inputs\(\s*(.*?)\s*\)", func_source, re.DOTALL + ) + if filter_inputs_match: + filter_inputs_text = filter_inputs_match.group(1) + filter_params = re.findall(r"(\w+)=(\w+)", filter_inputs_text) + + # Then look for parameter definitions in function body + # Find parameters defined with types in comments or actual code + param_defs = re.findall( + r"(\w+)\s*:\s*(\w+)(?:\s*=\s*([^,\n]+))?", func_source + ) + param_dict = { + name: (typ, default) for name, typ, default in param_defs + } + + # Add missing parameters preserving types when available + for param_name, param_value in filter_params: + if ( + param_name != param_value + and param_value not in parameter_map + and param_value not in ["True", "False", "None"] + ): + + # Use type from param_dict if available, otherwise Any + if param_value in param_dict: + param_type = param_dict[param_value][0] + try: + # Try to evaluate the type + annotation = ( + eval( # noqa: S307 # pylint: disable=eval-used + param_type + ) + ) + except (NameError, SyntaxError): + annotation = Any + + # Get default if available + default_str = param_dict[param_value][1] + try: + default = ( + eval( # noqa: S307 # pylint: disable=eval-used + default_str + ) + if default_str + else None + ) + except (NameError, SyntaxError): + default = None + else: + annotation = Any + default = None + + # Add parameter with preserved type/default + 
additional_params[param_value] = Parameter( + name=param_value, + kind=Parameter.POSITIONAL_OR_KEYWORD, + annotation=annotation, + default=default, + ) + except (OSError, TypeError): + pass + + # Add missing parameters to parameter_map + for name, param in additional_params.items(): + if name not in parameter_map: + parameter_map[name] = param + + formatted_params = cls.format_params(path=path, parameter_map=parameter_map) + + has_var_kwargs = any( + param.kind == Parameter.VAR_KEYWORD for param in formatted_params.values() + ) + + # If not, add **kwargs to formatted_params + if not has_var_kwargs: + formatted_params["kwargs"] = Parameter( + name="kwargs", + kind=Parameter.VAR_KEYWORD, + annotation=Any, + default=Parameter.empty, + ) + + code = cls.build_command_method_signature( + func_name=func_name, + formatted_params=formatted_params, + return_type=sig.return_annotation, + path=path, + model_name=model_name, + ) + code += cls.build_command_method_doc( + path=path, + func=func, + formatted_params=formatted_params, + model_name=model_name, + examples=examples, + ) + + code += cls.build_command_method_body( + path=path, func=func, formatted_params=formatted_params + ) + + return code + + +class DocstringGenerator: + """Dynamically generate docstrings for the commands.""" + + provider_interface = ProviderInterface() + + @staticmethod + def get_field_type( + field_type: Any, + is_required: bool, + target: Literal["docstring", "website"] = "docstring", + ) -> str: + """Get the implicit data type of a defined Pydantic field. + + Parameters + ---------- + field_type : Any + Typing object containing the field type. + is_required : bool + Flag to indicate if the field is required. + target : Literal["docstring", "website"] + Target to return type for. Defaults to "docstring". + + Returns + ------- + str + String representation of the field type. 
+ """ + is_optional = not is_required + + try: + _type = field_type + + if "BeforeValidator" in str(_type): + _type = "Optional[int]" if is_optional else "int" # type: ignore + + _type = ( + str(_type) + .replace("", "") + .replace("typing.", "") + .replace("pydantic.types.", "") + .replace("datetime.datetime", "datetime") + .replace("datetime.date", "date") + .replace("NoneType", "None") + .replace(", None", "") + ) + + if "openbb_" in str(_type): + _type = ( + str(_type).split(".", maxsplit=1)[0].split("openbb_")[0] + + str(_type).rsplit(".", maxsplit=1)[-1] + ) + + _type = ( + f"Optional[{_type}]" + if is_optional and "Optional" not in str(_type) + else _type + ) + + if target == "website": + _type = re.sub(r"Optional\[(.*)\]", r"\1", _type) + + return _type + + except TypeError: + # Fallback to the annotation if the repr fails + return field_type # type: ignore + + @staticmethod + def get_OBBject_description( + results_type: str, + providers: Optional[str], + ) -> str: + """Get the command output description.""" + available_providers = providers or "Optional[str]" + indent = 2 + + obbject_description = ( + f"{create_indent(indent)}OBBject\n" + f"{create_indent(indent+1)}results : {results_type}\n" + f"{create_indent(indent+2)}Serializable results.\n" + f"{create_indent(indent+1)}provider : {available_providers}\n" + f"{create_indent(indent+2)}Provider name.\n" + f"{create_indent(indent+1)}warnings : Optional[List[Warning_]]\n" + f"{create_indent(indent+2)}List of warnings.\n" + f"{create_indent(indent+1)}chart : Optional[Chart]\n" + f"{create_indent(indent+2)}Chart object.\n" + f"{create_indent(indent+1)}extra : Dict[str, Any]\n" + f"{create_indent(indent+2)}Extra info.\n" + ) + + obbject_description = obbject_description.replace("NoneType", "None") + + return obbject_description + + @staticmethod + def build_examples( + func_path: str, + param_types: Dict[str, type], + examples: Optional[List[Example]], + target: Literal["docstring", "website"] = "docstring", 
+ ) -> str: + """Get the example section from the examples.""" + if examples: + if target == "docstring": + prompt = ">>> " + indent = create_indent(2) + else: + prompt = "\n```python\n" + indent = create_indent(0) + + doc = f"\n{indent}Examples\n" + doc += f"{indent}--------\n" + doc += f"{indent}{prompt}from openbb import obb\n" + + for e in examples: + doc += e.to_python( + func_path=func_path, + param_types=param_types, + indentation=indent, + prompt=">>> " if target == "docstring" else "", + ) + return doc if target == "docstring" else doc + "```\n\n" + return "" + + @classmethod + def generate_model_docstring( # noqa: PLR0912, PLR0917 + cls, + model_name: str, + summary: str, + explicit_params: Dict[str, Parameter], + kwarg_params: dict, + returns: Dict[str, FieldInfo], + results_type: str, + sections: List[str], + ) -> str: + """Create the docstring for model.""" + docstring: str = "\n" + + def format_type(type_: str, char_limit: Optional[int] = None) -> str: + """Format type in docstrings.""" + type_str = str(type_) + + # Check if this is a complex union of literals (provider-specific choices) + if ( + "Union[" in type_str + and "Literal[" in type_str + and type_str.count("Literal[") > 1 + ): + # For complex Union with multiple Literals, simplify to the base type + base_types = set() + + # Extract the base types from literals first + literal_pattern = r"Literal\['([^']+)'(?:,\s*'[^']+')*\]" + for match in re.finditer(literal_pattern, type_str): + if match.group(1): + try: + val = match.group(1) + if val.isdigit(): + base_types.add("int") + elif val.isdecimal(): + base_types.add("float") + else: + base_types.add("str") + except (IndexError, AttributeError): + pass + + # Also check for explicit types in the Union + if "str" in type_str.split("[", maxsplit=1)[0].split(", "): + base_types.add("str") + if "int" in type_str.split("[", maxsplit=1)[0].split(", "): + base_types.add("int") + if "float" in type_str.split("[", maxsplit=1)[0].split(", "): + 
base_types.add("float") + + # Use the base types instead of the complex Union[Literal[...]] + if base_types: + if len(base_types) == 1: + type_str = next(iter(base_types)) + else: + type_str = f"Union[{', '.join(sorted(base_types))}]" + + # Apply the standard formatting + type_str = ( + type_str.replace("", "") + .replace("typing.", "") + .replace("pydantic.types.", "") + .replace("NoneType", "None") + .replace("datetime.date", "date") + .replace("datetime.datetime", "datetime") + ) + + if char_limit: + type_str = type_str[:char_limit] + ( + "..." if len(str(type_str)) > char_limit else "" + ) + return type_str + + def format_schema_description(description: str) -> str: + """Format description in docstrings.""" + description = description.replace("\n", f"\n{create_indent(2)}") + return description + + def format_description(description: str) -> str: + """Format description in docstrings with proper indentation for provider choices.""" + # Handle semicolon-separated provider descriptions + if ";" in description and "(provider:" in description: + parts = description.split(";") + formatted_parts = [] + + # Process the first part (main description) + first_part = parts[0].strip() + + # Extract the first sentence from the first part + first_sentence = "" + remainder = "" + if "." 
in first_part: + first_sentence, remainder = first_part.split(".", 1) + first_sentence = first_sentence.strip() + remainder = remainder.strip() + + formatted_parts.append(first_part) + parts.pop(1) + # Process subsequent parts (provider-specific descriptions) + for part in parts[1:]: + part = part.strip() # noqa: PLW2901 + + # Check if this part starts with the same first sentence + if first_sentence and part.startswith(first_sentence.rstrip(".")): + # Skip the repeated sentence and add only what follows + part_remainder = part[len(first_sentence.rstrip(".")) :].strip() + if part_remainder.startswith("."): + part_remainder = part_remainder[1:].strip() + formatted_parts.append(f"{create_indent(3)}{part_remainder}") + else: + # No repetition, add the entire part with indentation + formatted_parts.append(f"{create_indent(3)}{part.strip()}") + + # Join all parts with semicolons + description = ";\n".join(formatted_parts) + if "Choices" not in description: + return description + + # Handle provider-specific choices + if "\nChoices for " in description: + # Split into main description and provider choices part + parts = description.split("\nChoices for ") + main_desc = parts[0].rstrip() + + if len(parts) > 1: + formatted_lines = [main_desc] + + # Add each provider choice line with proper indentation + for choice_line in parts[1:]: + # Check if the line contains a newline character (due to word wrapping) + if "\n" in choice_line: + # Split the choice line at newlines + choice_parts = choice_line.split("\n") + # Add first part with "Choices for" prefix + formatted_lines.append( + f"{create_indent(3)}Choices for {choice_parts[0]}" + ) + + # Add remaining parts with proper indentation + for part in choice_parts[1:]: + if part.strip(): # Skip empty lines + formatted_lines.append( + f"{create_indent(4)}{part.strip()}" + ) + else: + # No line breaks in this choice + formatted_lines.append( + f"{create_indent(3)}Choices for {choice_line}" + ) + + return 
"\n".join(formatted_lines) + + # Standard behavior for other descriptions - add proper indentation to each line + return description.replace("\n", f"\n{create_indent(3)}") + + def get_param_info(parameter: Optional[Parameter]) -> Tuple[str, str]: + """Get the parameter info.""" + if not parameter: + return "", "" + annotation = getattr(parameter, "_annotation", None) + if isinstance(annotation, _AnnotatedAlias): + args = getattr(annotation, "__args__", []) if annotation else [] + p_type = args[0] if args else None + else: + p_type = annotation + type_ = ( + getattr(p_type, "__name__", "") if inspect.isclass(p_type) else p_type + ) + metadata = getattr(annotation, "__metadata__", []) + description = getattr(metadata[0], "description", "") if metadata else "" + return type_, description # type: ignore + + provider_param: Union[Parameter, dict] = {} + chart_param: Union[Parameter, dict] = {} + + # Description summary + if "description" in sections: + docstring = summary.strip("\n").replace("\n ", f"\n{create_indent(2)}") + docstring += "\n\n" + else: + docstring += "\n\n" + + if "parameters" in sections: + provider_param = explicit_params.pop("provider", {}) # type: ignore + chart_param = explicit_params.pop("chart", {}) # type: ignore + docstring += f"{create_indent(2)}Parameters\n" + docstring += f"{create_indent(2)}----------\n" + + if provider_param: + _, description = get_param_info(provider_param) # type: ignore + provider_param._annotation = str # type: ignore # pylint: disable=protected-access + docstring += f"{create_indent(2)}provider : str\n" + docstring += f"{create_indent(3)}{format_description(description)}\n" + + # Explicit parameters + for param_name, param in explicit_params.items(): + type_, description = get_param_info(param) + type_str = format_type(str(type_), char_limit=86) + docstring += f"{create_indent(2)}{param_name} : {type_str}\n" + docstring += f"{create_indent(3)}{format_description(description)}\n" + + # Kwargs + for param_name, param in 
kwarg_params.items(): + type_, description = get_param_info(param) + p_type = getattr(param, "type", "") + type_ = ( + getattr(p_type, "__name__", "") + if inspect.isclass(p_type) + else p_type + ) + type_ = format_type(type_) + if "NoneType" in str(type_): + type_ = f"Optional[{type_}]".replace(", NoneType", "") + + default = getattr(param, "default", "") + description = getattr(default, "description", "") + + # If empty description, check for OpenBBField annotations in parameter's annotation + if not description and hasattr(param, "annotation"): + param_annotation = getattr(param, "annotation", None) + # Check if annotation is an Annotated type + if ( + hasattr(param_annotation, "__origin__") + and param_annotation.__origin__ is Annotated # type: ignore + ): + # Extract metadata from annotation + metadata = getattr(param_annotation, "__metadata__", []) + for meta in metadata: + # Look for OpenBBField with description + if hasattr(meta, "description") and meta.description: + description = meta.description + break + + # If still no description but param default is a Query object, extract from there + if not description and hasattr(param, "default"): + param_default = getattr(param, "default") + if ( + hasattr(param_default, "__class__") + and "Query" in param_default.__class__.__name__ + ): + description = getattr(param_default, "description", "") or "" + + # Extract provider-specific choices directly from the provider interface + if hasattr(p_type, "__origin__") and p_type.__origin__ is Union: + provider_choices = {} + + # Get the list of providers for this model directly from provider_interface.model_providers + try: + providers = list( + cls.provider_interface.model_providers.get(model_name) + .__dataclass_fields__.get("provider") + .type.__args__ + ) + + # For each provider, extract their specific choices for this parameter from the map + for provider in providers: + if provider == "openbb": + continue + try: + # Directly get provider field info from the map 
structure + provider_field_info = ( + cls.provider_interface.map.get(model_name, {}) + .get(provider, {}) + .get("QueryParams", {}) + .get("fields", {}) + .get(param_name) + ) + + # If the field exists and has a Literal annotation + if ( + provider_field_info + and hasattr(provider_field_info, "annotation") + and hasattr( + provider_field_info.annotation, "__origin__" + ) + and provider_field_info.annotation.__origin__ + is Literal + ): + # Extract literal values as provider choices + provider_choices[provider] = list( + provider_field_info.annotation.__args__ + ) + except (KeyError, AttributeError): + continue + except (AttributeError, KeyError): + pass + + # Add provider-specific choices to description + for provider, choices in provider_choices.items(): + if choices: + # Format choices with word wrapping for readability + formatted_choices = [] + line_length = 0 + line_limit = 100 # Max line length + + for i, choice in enumerate(choices): + choice_str = f"'{choice}'" + + # If adding this choice would exceed line limit, start a new line + if ( + line_length > 0 + and line_length + len(choice_str) + 2 > line_limit + ): + # End the current line + formatted_choices.append("\n") + line_length = 0 + + # Add comma and space if not the first choice in the line + if i > 0 and line_length > 0: + formatted_choices.append(", ") + line_length += 2 + + formatted_choices.append(choice_str) + line_length += len(choice_str) + + choices_str = "".join(formatted_choices) + + description += f"\nChoices for {provider}: {choices_str}" + + docstring += f"{create_indent(2)}{param_name} : {type_}\n" + docstring += f"{create_indent(3)}{format_description(description)}\n" + + if chart_param: + _, description = get_param_info(chart_param) # type: ignore + docstring += f"{create_indent(2)}chart : bool\n" + docstring += f"{create_indent(3)}{format_description(description)}\n" + + if "returns" in sections: + # Returns + docstring += "\n" + docstring += f"{create_indent(2)}Returns\n" + 
docstring += f"{create_indent(2)}-------\n" + _providers, _ = get_param_info(explicit_params.get("provider")) + + docstring += cls.get_OBBject_description(results_type, _providers) + + # Schema + underline = "-" * len(model_name) + docstring += f"\n{create_indent(2)}{model_name}\n" + docstring += f"{create_indent(2)}{underline}\n" + + for name, field in returns.items(): + field_type = cls.get_field_type(field.annotation, field.is_required()) + description = getattr(field, "description", "") + docstring += f"{create_indent(2)}{field.alias or name} : {field_type}\n" + docstring += f"{create_indent(3)}{format_schema_description(description.strip())}\n" + + return docstring + + @classmethod + def generate( # pylint: disable=too-many-positional-arguments + cls, + path: str, + func: Callable, + formatted_params: OrderedDict[str, Parameter], + model_name: Optional[str] = None, + examples: Optional[List[Example]] = None, + ) -> Optional[str]: + """Generate the docstring for the function.""" + doc = func.__doc__ or "" + param_types = {} + + sections = SystemService().system_settings.python_settings.docstring_sections + max_length = ( + SystemService().system_settings.python_settings.docstring_max_length + ) + + # Parameters explicit in the function signature + explicit_params = dict(formatted_params) + explicit_params.pop("extra_params", None) + # Map of parameter names to types + param_types = {k: v.annotation for k, v in explicit_params.items()} + + if model_name: + params = cls.provider_interface.params.get(model_name, {}) + return_schema = cls.provider_interface.return_schema.get(model_name, None) + if params and return_schema: + # Parameters passed as **kwargs + kwarg_params = params["extra"].__dataclass_fields__ + param_types.update({k: v.type for k, v in kwarg_params.items()}) + # Format the annotation to hide the metadata, tags, etc. 
+ annotation = func.__annotations__.get("return") + results_type = ( + cls._get_repr( + cls._get_generic_types( + annotation.model_fields["results"].annotation, # type: ignore[union-attr,arg-type] + [], + ), + model_name, + ) + if isclass(annotation) and issubclass(annotation, OBBject) # type: ignore[arg-type] + else model_name + ) + doc = cls.generate_model_docstring( + model_name=model_name, + summary=func.__doc__ or "", + explicit_params=explicit_params, + kwarg_params=kwarg_params, + returns=return_schema.model_fields, + results_type=results_type, + sections=sections, + ) + + if "examples" in sections: + doc += cls.build_examples( + path.replace("/", "."), + param_types, + examples, + ) + else: + doc_parts = [] + if doc: + if "\nParameters" in doc: + doc_parts = doc.split("\nParameters") + summary = doc_parts[0].strip() + elif "\nReturns" in doc: + doc_parts = doc.split("\nReturns") + summary = doc_parts[0].strip() + else: + summary = doc.strip() + else: + summary = "" + + # Format the summary + summary = summary.replace("\n ", f"\n{create_indent(2)}") + + sections = ( + SystemService().system_settings.python_settings.docstring_sections + ) + result_doc = summary + # Add parameters section if needed and not already in docstring + if ( + formatted_params + and "parameters" in sections + and "Parameters" not in doc + and [p for p_name, p in formatted_params.items() if p_name != "kwargs"] + ): + param_section = ( + f"\n\n{create_indent(2)}Parameters\n{create_indent(2)}----------\n" + ) + + # Process each parameter + for param_name, param in formatted_params.items(): + if param_name == "kwargs": + continue + + # Get parameter type and description + annotation = getattr(param, "_annotation", None) + if isinstance(annotation, _AnnotatedAlias): + # Extract from OpenBBField annotations + p_type = annotation.__args__[0] + metadata = getattr(annotation, "__metadata__", []) + description = ( + getattr(metadata[0], "description", "") if metadata else "" + ) + else: + 
.replace("<class '", "")
                        .replace("'>", "")
indentation + returns_section += f"{create_indent(3)}{field_name} : {field_type}\n" + if description: + returns_section += ( + f"{create_indent(4)}{description}\n" + ) + except (AttributeError, TypeError): + pass + else: + # Default case when no return annotation is available + returns_section += f"{create_indent(2)}Any\n" + + result_doc += returns_section + result_doc = result_doc.replace("\n ", f"\n{create_indent(2)}") + + doc = result_doc + "\n" + + if "examples" in sections: + doc += cls.build_examples( + path.replace("/", "."), + param_types, + examples, + ) + + if ( + max_length # pylint: disable=chained-comparison + and len(doc) > max_length + and max_length > 3 + ): + doc = doc[: max_length - 3] + "..." + return doc + + @classmethod + def _get_generic_types(cls, type_: type, items: list) -> List[str]: + """Unpack generic types recursively. + + Parameters + ---------- + type_ : type + Type to unpack. + items : list + List to store the unpacked types. + + Returns + ------- + List[str] + List of unpacked type names. + + Examples + -------- + Union[List[str], Dict[str, str], Tuple[str]] -> ["List", "Dict", "Tuple"] + """ + if hasattr(type_, "__args__"): + origin = get_origin(type_) + # pylint: disable=unidiomatic-typecheck + if ( + type(origin) is type + and origin is not Annotated + and (name := getattr(type_, "_name", getattr(type_, "__name__", None))) + ): + items.append(name.title()) + func = partial(cls._get_generic_types, items=items) + set().union(*map(func, type_.__args__), items) # type: ignore + return items + + @staticmethod + def _get_repr(items: List[str], model: str) -> str: + """Get the string representation of the types list with the model name. + + Parameters + ---------- + items : List[str] + List of type names. + model : str + Model name to access the model providers. + + Returns + ------- + str + String representation of the unpacked types list. 
+ + Examples + -------- + [List, Dict, Tuple], M -> "Union[List[M], Dict[str, M], Tuple[M]]" + """ + if s := [ + f"{i}[str, {model}]" if i.lower() == "dict" else f"{i}[{model}]" + for i in items + ]: + return f"Union[{', '.join(s)}]" if len(s) > 1 else s[0] + return model + + +class PathHandler: + """Handle the paths for the Platform.""" + + @staticmethod + def build_route_map() -> Dict[str, BaseRoute]: + """Build the route map.""" + router = RouterLoader.from_extensions() + route_map = {route.path: route for route in router.api_router.routes} # type: ignore + + return route_map + + @staticmethod + def build_path_list(route_map: Dict[str, BaseRoute]) -> List[str]: + """Build the path list.""" + path_list = [] + for route_path in route_map: + if route_path not in path_list: + path_list.append(route_path) + + sub_path_list = route_path.split("/") + + for length in range(len(sub_path_list)): + sub_path = "/".join(sub_path_list[:length]) + if sub_path not in path_list: + path_list.append(sub_path) + + return path_list + + @staticmethod + def get_route(path: str, route_map: Dict[str, BaseRoute]): + """Get the route from the path.""" + return route_map.get(path) + + @staticmethod + def get_child_path_list(path: str, path_list: List[str]) -> List[str]: + """Get the child path list.""" + direct_children = [] + for p in path_list: + if p.startswith(path): + path_reminder = p[len(path) :] # noqa: E203 + if path_reminder.count("/") == 1: + direct_children.append(p) + + return direct_children + + @staticmethod + def clean_path(path: str) -> str: + """Clean the path.""" + if path.startswith("/"): + path = path[1:] + return path.replace("-", "_").replace("/", "_") + + @classmethod + def build_module_name(cls, path: str) -> str: + """Build the module name.""" + if not path: + return "__extensions__" + return cls.clean_path(path=path) + + @classmethod + def build_module_class(cls, path: str) -> str: + """Build the module class.""" + if not path: + return "Extensions" + return 
f"ROUTER_{cls.clean_path(path=path)}" + + +class ReferenceGenerator: + """Generate the reference for the Platform.""" + + REFERENCE_FIELDS = [ + "deprecated", + "description", + "examples", + "parameters", + "returns", + "data", + ] + + # pylint: disable=protected-access + pi = DocstringGenerator.provider_interface + route_map = PathHandler.build_route_map() + + @classmethod + def _get_endpoint_examples( + cls, + path: str, + func: Callable, + examples: Optional[List[Example]], + ) -> str: + """Get the examples for the given standard model or function. + + For a given standard model or function, the examples are fetched from the + list of Example objects and formatted into a string. + + Parameters + ---------- + path : str + Path of the router. + func : Callable + Router endpoint function. + examples : Optional[List[Example]] + List of Examples (APIEx or PythonEx type) + for the endpoint. + + Returns + ------- + str: + Formatted string containing the examples for the endpoint. + """ + sig = signature(func) + parameter_map = dict(sig.parameters) + formatted_params = MethodDefinition.format_params( + path=path, parameter_map=parameter_map + ) + explicit_params = dict(formatted_params) + explicit_params.pop("extra_params", None) + param_types = {k: v.annotation for k, v in explicit_params.items()} + + return DocstringGenerator.build_examples( + path.replace("/", "."), + param_types, + examples, + "website", + ) + + @classmethod + def _get_provider_parameter_info(cls, model: str) -> Dict[str, Any]: + """Get the name, type, description, default value and optionality information for the provider parameter. + + Parameters + ---------- + model : str + Standard model to access the model providers. 
+ + Returns + ------- + Dict[str, Any] + Dictionary of the provider parameter information + """ + pi_model_provider = cls.pi.model_providers[model] + provider_params_field = pi_model_provider.__dataclass_fields__["provider"] + + name = provider_params_field.name + field_type = DocstringGenerator.get_field_type( + provider_params_field.type, False, "website" + ) + default_priority = provider_params_field.type.__args__ + description = ( + "The provider to use, by default None. " + "If None, the priority list configured in the settings is used. " + f"Default priority: {', '.join(default_priority)}." + ) + + provider_parameter_info = { + "name": name, + "type": field_type, + "description": description, + "default": None, + "optional": True, + } + + return provider_parameter_info + + @classmethod + def _get_provider_field_params( + cls, model: str, params_type: str, provider: str = "openbb" + ) -> List[Dict[str, Any]]: + """Get the fields of the given parameter type for the given provider of the standard_model.""" + provider_field_params = [] + expanded_types = MethodDefinition.TYPE_EXPANSION + model_map = cls.pi.map[model] + + # First, check if the provider class itself has __json_schema_extra__ + # This contains class-level schema information that applies to fields + class_schema_extra = {} + try: + # Get the actual provider class + provider_class = model_map[provider][params_type]["class"] + # Check for class-level __json_schema_extra__ attribute + if hasattr(provider_class, "__json_schema_extra__"): + class_schema_extra = provider_class.__json_schema_extra__ + except (KeyError, AttributeError): + pass + + for field, field_info in model_map[provider][params_type]["fields"].items(): + # Start with class-level schema information for this field if it exists + extra = {} + choices = None + if field in class_schema_extra: + extra = class_schema_extra[field].copy() + choices = extra.get("choices") + + # Then apply field-level schema extra (which takes precedence) + 
field_extra = field_info.json_schema_extra or {} + extra.update(field_extra) + if "choices" in field_extra: + choices = field_extra["choices"] + + # Determine the field type, expanding it if necessary + field_type = field_info.annotation + is_required = field_info.is_required() + field_type_str = DocstringGenerator.get_field_type( + field_type, is_required, "website" + ) + + # Handle case where field_type_str contains ", optional" suffix + if ", optional" in field_type_str: + field_type_str = field_type_str.replace(", optional", "") + is_required = False + + cleaned_description = str(field_info.description).strip().replace('"', "'") + + # Add information for the providers supporting multiple symbols + if params_type == "QueryParams" and extra: + providers: List = [] + for p, v in extra.items(): + if isinstance(v, dict) and v.get("multiple_items_allowed"): + providers.append(p) + if "choices" in v: + choices = v.get("choices") + elif isinstance(v, list) and "multiple_items_allowed" in v: + providers.append(p) + elif isinstance(v, dict) and "choices" in v: + choices = v.get("choices") + + if providers: + multiple_items = ", ".join(providers) + cleaned_description += ( + f" Multiple items allowed for provider(s): {multiple_items}." 
+ ) + field_type_str = f"Union[{field_type_str}, List[{field_type_str}]]" + elif field in expanded_types: + expanded_type = DocstringGenerator.get_field_type( + expanded_types[field], is_required, "website" + ) + field_type_str = f"Union[{field_type_str}, {expanded_type}]" + + default_value = ( + "" if field_info.default is PydanticUndefined else field_info.default + ) + + provider_field_params.append( + { + "name": field, + "type": field_type_str, + "description": cleaned_description, + "default": default_value, + "optional": not is_required, + "choices": choices, + } + ) + + return provider_field_params + + @staticmethod + def _get_obbject_returns_fields( + model: str, + providers: str, + ) -> List[Dict[str, str]]: + """Get the fields of the OBBject returns object for the given standard_model. + + Parameters + ---------- + model : str + Standard model of the returned object. + providers : str + Available providers for the model. + + Returns + ------- + List[Dict[str, str]] + List of dictionaries containing the field name, type, description, default + and optionality of each field. + """ + obbject_list = [ + { + "name": "results", + "type": f"list[{model}]", + "description": "Serializable results.", + }, + { + "name": "provider", + "type": f"Optional[{providers}]", + "description": "Provider name.", + }, + { + "name": "warnings", + "type": "Optional[list[Warning_]]", + "description": "List of warnings.", + }, + { + "name": "chart", + "type": "Optional[Chart]", + "description": "Chart object.", + }, + { + "name": "extra", + "type": "dict[str, Any]", + "description": "Extra info.", + }, + ] + + return obbject_list + + @staticmethod + def _get_post_method_parameters_info( + docstring: str, + ) -> List[Dict[str, Union[bool, str]]]: + """Get the parameters for the POST method endpoints. 
+ + Parameters + ---------- + docstring : str + Router endpoint function's docstring + + Returns + ------- + List[Dict[str, str]] + List of dictionaries containing the name, type, description, default + and optionality of each parameter. + """ + parameters_list: list = [] + + # Extract only the Parameters section (between "Parameters" and "Returns") + params_section = "" + if "Parameters" in docstring and "Returns" in docstring: + params_section = docstring.split("Parameters")[1].split("Returns")[0] + elif "Parameters" in docstring: + params_section = docstring.split("Parameters")[1] + else: + return parameters_list # No parameters section found + + # Define a regex pattern to match parameter blocks + # This pattern looks for a parameter name followed by " : ", then captures the type and description + pattern = re.compile( + r"\n\s*(?P\w+)\s*:\s*(?P[^\n]+?)(?:\s*=\s*(?P[^\n]+))?\n\s*(?P[^\n]+)" + ) + + # Find all matches in the parameters section only + matches = pattern.finditer(params_section) + + if matches: + # Iterate over the matches to extract details + for match in matches: + # Extract named groups as a dictionary + param_info = match.groupdict() + + # Clean up and process the type string + param_type = param_info["type"].strip() + + # Check for ", optional" in type and handle appropriately + is_optional = "Optional" in param_type or ", optional" in param_type + if ", optional" in param_type: + param_type = param_type.replace(", optional", "") + + # If no default value is captured, set it to an empty string + default_value = ( + param_info["default"] if param_info["default"] is not None else "" + ) + param_type = ( + str(param_type) + .replace("openbb_core.provider.abstract.data.Data", "Data") + .replace("List", "list") + .replace("Dict", "dict") + .replace("NoneType", "None") + ) + # Create a new dictionary with fields in the desired order + param_dict = { + "name": param_info["name"], + "type": ReferenceGenerator._clean_string_values(param_type), + 
"description": ReferenceGenerator._clean_string_values( + param_info["description"] + ), + "default": default_value, + "optional": is_optional, + } + + # Append the dictionary to the list + parameters_list.append(param_dict) + + return parameters_list + + @staticmethod + def _clean_string_values(value: Any) -> Any: + """Convert double quotes in string values to single quotes and fix type references. + + Parameters + ---------- + value : Any + The value to clean + + Returns + ------- + Any + The cleaned value + """ + if isinstance(value, str): + # Fix fully qualified Data type references + value = re.sub( + r"List\[openbb_core\.provider\.abstract\.data\.Data\]", + "list[Data]", + value, + ) + value = re.sub( + r"openbb_core\.provider\.abstract\.data\.Data", "Data", value + ) + + # Handle Literal types specifically + if ( + "Literal[" in value + and "]" in value + and "'" not in value + and '"' not in value + ): + # Extract the content between Literal[ and ] + start_idx = value.find("Literal[") + len("Literal[") + end_idx = value.rfind("]") + if start_idx < end_idx: + content = value[start_idx:end_idx] + # Add single quotes around each value + values = [f"'{v.strip()}'" for v in content.split(",")] + # Reconstruct the Literal type + return f"Literal[{', '.join(values)}]" + + # Replace capitalized Dict with lowercase dict + value = re.sub(r"\bDict\b", "dict", value) + + # Replace capitalized List with lowercase list + value = re.sub(r"\bList\b", "list", value) + + # Replace double quotes with single quotes for other strings + return value.replace('"', "'") + if isinstance(value, dict): + return { + k: ReferenceGenerator._clean_string_values(v) for k, v in value.items() + } + if isinstance(value, list): + return [ReferenceGenerator._clean_string_values(item) for item in value] + + return value + + @staticmethod + def _get_function_signature_info(func: Callable) -> List[Dict[str, Any]]: + """Extract parameter information directly from function signature.""" + 
params_info = [] + sig = signature(func) + + for name, param in sig.parameters.items(): + # Skip 'self' and context parameters + if name in ["self", "cc"]: + continue + + # Skip parameters with dependency injections through annotations + if isinstance(param.annotation, _AnnotatedAlias) and any( + hasattr(meta, "dependency") for meta in param.annotation.__metadata__ + ): + continue + + # Skip parameters with Depends in default values + if param.default is not Parameter.empty: + default_str = str(param.default) + if "Depends" in default_str: + continue + + param_type = param.annotation + is_optional = ( + param.default is not Parameter.empty + ) # Parameter is optional if it has a default value + description = "" + choices = None + default = param.default if param.default is not Parameter.empty else None + json_extra = None + + # Check if type is optional + if ( + hasattr(param_type, "__origin__") + and param_type.__origin__ is Union + and (type(None) in param_type.__args__ or None in param_type.__args__) + ): + # Check if None or NoneType is in the union + is_optional = True + # Extract the actual type (excluding None) + non_none_args = [ + arg + for arg in param_type.__args__ + if arg is not type(None) and arg is not None + ] + if len(non_none_args) == 1: + param_type = non_none_args[0] + + # In ReferenceGenerator._get_function_signature_info, modify the Annotated handling: + if isinstance(param_type, _AnnotatedAlias): + base_type = param_type.__args__[0] + for meta in param_type.__metadata__: + if hasattr(meta, "description"): + description = meta.description + if hasattr(meta, "choices"): + choices = meta.choices + if hasattr(meta, "default"): + default = meta.default + if hasattr(meta, "json_schema_extra"): + json_extra = meta.json_schema_extra + + # Add handling for Query objects inside Annotated metadata + if ( + hasattr(meta, "__class__") + and "Query" in meta.__class__.__name__ + ): + description = getattr(meta, "description", "") or description + json_extra 
type_str.replace("<class '", "").replace("'>", "")
# This pattern captures the model name inside "OBBject[<model>]" and its description
+ """ + reference: Dict[str, Dict] = {} + + for path, route in route_map.items(): + # Initialize the provider parameter fields as an empty dictionary + provider_parameter_fields = {"type": ""} + # Initialize the reference fields as empty dictionaries + reference[path] = {field: {} for field in cls.REFERENCE_FIELDS} + # Route method is used to distinguish between GET and POST methods + route_method = getattr(route, "methods", None) + # Route endpoint is the callable function + route_func = getattr(route, "endpoint", lambda: None) + # Attribute contains the model and examples info for the endpoint + openapi_extra = getattr(route, "openapi_extra", {}) + # Standard model is used as the key for the ProviderInterface Map dictionary + standard_model = openapi_extra.get("model", "") + # Add endpoint model for GET methods + reference[path]["model"] = standard_model + # Add endpoint deprecation details + reference[path]["deprecated"] = { + "flag": MethodDefinition.is_deprecated_function(path), + "message": MethodDefinition.get_deprecation_message(path), + } + # Add endpoint examples + examples = openapi_extra.pop("examples", []) + reference[path]["examples"] = cls._get_endpoint_examples( + path, + route_func, + examples, # type: ignore + ) + validate_output = not openapi_extra.pop("no_validate", None) + model_map = cls.pi.map.get(standard_model, {}) + reference[path]["openapi_extra"] = { + k: v for k, v in openapi_extra.items() if v + } + + # Add data for the endpoints having a standard model + if route_method == {"GET"} and model_map: + reference[path]["description"] = getattr( + route, "description", "No description available." 
+ ) + for provider in model_map: + if provider == "openbb": + # openbb provider is always present hence its the standard field + reference[path]["parameters"]["standard"] = ( + cls._get_provider_field_params( + standard_model, "QueryParams" + ) + ) + # Add `provider` parameter fields to the openbb provider + provider_parameter_fields = cls._get_provider_parameter_info( + standard_model + ) + + # Add endpoint data fields for standard provider + reference[path]["data"]["standard"] = ( + cls._get_provider_field_params(standard_model, "Data") + ) + continue + + # Adds provider specific parameter fields to the reference + reference[path]["parameters"][provider] = ( + cls._get_provider_field_params( + standard_model, "QueryParams", provider + ) + ) + + # Adds provider specific data fields to the reference + reference[path]["data"][provider] = cls._get_provider_field_params( + standard_model, "Data", provider + ) + + # Remove choices from standard parameters if they exist in provider-specific parameters + provider_param_names = { + p["name"] for p in reference[path]["parameters"][provider] + } + + for i, param in enumerate( + reference[path]["parameters"]["standard"] + ): + param_name = param.get("name") + if ( + param_name in provider_param_names + and param.get("choices") is not None + ): + # This parameter has a provider-specific version, so remove choices from standard + reference[path]["parameters"]["standard"][i][ + "choices" + ] = None + + # Add endpoint returns data + if validate_output is False: + reference[path]["returns"]["Any"] = { + "description": "Unvalidated results object.", + } + else: + providers = provider_parameter_fields["type"] + reference[path]["returns"]["OBBject"] = ( + cls._get_obbject_returns_fields(standard_model, providers) + ) + # Add data for the endpoints without a standard model (data processing endpoints) + else: + # Get function signature information + sig_params = cls._get_function_signature_info(route_func) + + # Non-model method's 
router `description` attribute is unreliable as it may or + # may not contain the "Parameters" and "Returns" sections. Hence, the + # endpoint function docstring is used instead. + docstring = getattr(route_func, "__doc__", "") + + if not docstring: + continue + + description = docstring.split("Parameters")[0].strip() + # Remove extra spaces in between the string + reference[path]["description"] = re.sub(" +", " ", description) + + # Combine signature parameters with docstring parameters + docstring_params = cls._get_post_method_parameters_info(docstring) + + # Create a merged parameter list with signature info taking precedence + merged_params: dict = {} + for param in docstring_params: + merged_params[param["name"]] = param + + for param in sig_params: + name = param["name"] + if name in merged_params: + # Update existing param with signature info + for key, value in param.items(): + if value and not (key == "description" and not value): + merged_params[name][key] = value + else: + merged_params[name] = param + + # Add endpoint parameters fields from the merged info + reference[path]["parameters"]["standard"] = list(merged_params.values()) + + # Add endpoint returns data + # If the endpoint is not validated, the return type is set to Any + if validate_output is False: + reference[path]["returns"]["Any"] = { + "description": "Unvalidated results object.", + } + else: + model_fields: list = [] + # First try to get from function signature + returns_info = cls._extract_return_type(route_func) + + if not returns_info: + # Then try to get return info from docstring + returns_info = cls._get_post_method_returns_info(docstring) + + return_annotation = inspect.signature(route_func).return_annotation + + is_generic_obbject = ( + isinstance(returns_info, dict) + and "OBBject" in returns_info + and any( + item.get("name") == "results" and "Data" in item.get("type") + for item in returns_info.get("OBBject", []) + ) + ) + + # Set returns field directly + 
reference[path]["returns"] = returns_info + reference[path]["model"] = None + reference[path]["data"] = {} + + if isinstance(returns_info, str) and "[" in returns_info: + # Extract inner type from container type (e.g., "list[ModelName]") + match = re.search(r"\[(.*?)\]", returns_info) + if match: + inner_type_name = match.group(1) + # Try to find the actual model class + for module in sys.modules.values(): + if hasattr(module, inner_type_name): + model_class = getattr(module, inner_type_name) + if hasattr(model_class, "model_fields"): + # Found the model class, extract its fields + model_fields = [] + for ( + field_name, + field, + ) in model_class.model_fields.items(): + if field_name.startswith("_"): + continue + + field_type = ( + DocstringGenerator.get_field_type( + field.annotation, + not field.is_required(), + "website", + ) + ) + + model_fields.append( + { + "name": field_name, + "type": ReferenceGenerator._clean_string_values( + field_type + ), + "description": ( + ReferenceGenerator._clean_string_values( + field.description + ) + if field.description + else "" + ), + "default": ( + field.default + if field.default + and field.default + != PydanticUndefined + else "" + ), + "optional": not field.is_required(), + } + ) + + if model_fields: + list_match = re.search( + r"list\[(.*?)\]", returns_info + ) + model_name = ( + list_match.group(1) + if list_match + else returns_info + ) + + reference[path]["data"][ + model_name + ] = model_fields + break + # For Pydantic models, extract the fields + elif ( + hasattr(return_annotation, "model_fields") + and not is_generic_obbject + ): + for field_name, field in return_annotation.model_fields.items(): + # Skip private fields + if field_name.startswith("_"): + continue + + field_type = DocstringGenerator.get_field_type( + field.annotation, not field.is_required(), "website" + ) + + model_fields.append( + { + "name": field_name, + "type": field_type, + "description": ( + field.description.replace('"', "'") + if 
field.description + else "" + ), + "default": ( + field.default + if field.default + and field.default != PydanticUndefined + else "" + ), + "optional": field.is_required(), + } + ) + # For results field in OBBject returns, check for actual model type + + if isinstance(returns_info, dict) and "OBBject" in returns_info: + # For OBBject returns, extract model name from results field type + model_name = None + for item in returns_info["OBBject"]: + if item["name"] == "results": + result_type = item["type"] + # Extract model name from result type (e.g., "list[ModelName]" -> "ModelName") + list_match = re.search(r"list\[(.*?)\]", result_type) + model_name = ( + list_match.group(1) if list_match else result_type + ) + + # Don't add data fields for generic types like "Data" or if already in parameters + if model_name and model_name != "Data": + # Try to find the actual model class + for ( + module_name, # pylint: disable=unused-variable + module, + ) in sys.modules.items(): # noqa: W0612 + if hasattr(module, model_name): + model_class = getattr(module, model_name) + if hasattr(model_class, "model_fields"): + # Found the model class, extract its fields + model_fields = [] + for ( + field_name, + field, + ) in model_class.model_fields.items(): + if field_name.startswith("_"): + continue + + field_type = DocstringGenerator.get_field_type( + field.annotation, + not field.is_required(), + "website", + ) + + model_fields.append( + { + "name": field_name, + "type": field_type, + "description": field.description + or "", + "default": ( + field.default + if field.default + != PydanticUndefined + else "" + ), + "optional": not field.is_required(), + } + ) + + if model_fields: + reference[path]["data"][ + model_name + ] = model_fields + break + break + elif isinstance(returns_info, str): + # For string return types like "list[YFinanceUdfSearchResult]" + list_match = re.search(r"list\[(.*?)\]", returns_info) + model_name = list_match.group(1) if list_match else returns_info + + # 
Skip basic types + if model_name not in ( + "str", + "int", + "float", + "bool", + "Any", + "dict", + ): + # Try to find the model class + for module_name, module in sys.modules.items(): + if hasattr(module, model_name): + model_class = getattr(module, model_name) + if hasattr(model_class, "model_fields"): + # Found the model class, extract its fields + model_fields = [] + for ( + field_name, + field, + ) in model_class.model_fields.items(): + if field_name.startswith("_"): + continue + + field_type = ( + DocstringGenerator.get_field_type( + field.annotation, + not field.is_required(), + "website", + ) + ) + + model_fields.append( + { + "name": field_name, + "type": field_type, + "description": field.description + or "", + "default": ( + field.default + if field.default + != PydanticUndefined + else "" + ), + "optional": not field.is_required(), + } + ) + + if model_fields: + reference[path]["data"][ + model_name + ] = model_fields + break + else: + # For direct returns that aren't OBBject + model_name = ( + return_annotation.__name__ + if hasattr(return_annotation, "__name__") + else "Model" + ) + reference[path]["data"] = ( + {model_name: model_fields} if model_fields else {} + ) + + return reference + + @staticmethod + def _extract_return_type(func: Callable) -> Union[str, dict]: + """Extract return type information from function.""" + return_annotation = inspect.signature(func).return_annotation + + # If no return annotation, or return annotation is inspect.Signature.empty + if return_annotation is inspect.Signature.empty: + return {"type": "Any"} + + # Check if the return type is an OBBject + type_str = str(return_annotation) + + if "OBBject" in type_str or ( + hasattr(return_annotation, "__name__") + and "OBBject" in return_annotation.__name__ + ): + # Extract the model name from docstring or type annotation + result_type = "list[Data]" # Default fallback + + # Try to extract from type annotation first (more reliable) + if hasattr(return_annotation, 
"__origin__") and hasattr( + return_annotation, "__args__" + ): + # For OBBject[SomeType] + inner_type = return_annotation.__args__[0] + if hasattr(inner_type, "__name__"): + result_type = inner_type.__name__ + elif hasattr(inner_type, "_name") and inner_type._name: + result_type = inner_type._name + + # If not found, try to extract from docstring + if result_type == "list[Data]": + docstring = inspect.getdoc(func) or "" + if "Returns" in docstring: + returns_section = docstring.split("Returns")[1].split("\n\n")[0] + # Look for model name in docstring + patterns = [ + r"OBBject\[(.*?)\]", # OBBject[Model] + r"results : ([\w\d_]+)", # results : Model + r"Returns\s+-------\s+(\w+)", # Direct return type + ] + + for pattern in patterns: + model_match = re.search(pattern, returns_section) + if model_match: + result_type = model_match.group(1) + break + + # Ensure result_type doesn't already have a container type + if "[" in result_type and "]" not in result_type: + result_type += "]" # Add missing closing bracket + + result_type = ReferenceGenerator._clean_string_values(result_type) + # Return the standard OBBject structure with correct result type + return { + "OBBject": [ + { + "name": "results", + "type": ( + result_type + if "[" in result_type + else f"list[{result_type}]" + ), + "description": "Serializable results.", + }, + {"name": "provider", "type": None, "description": "Provider name."}, + { + "name": "warnings", + "type": "Optional[list[Warning_]]", + "description": "List of warnings.", + }, + { + "name": "chart", + "type": "Optional[Chart]", + "description": "Chart object.", + }, + { + "name": "extra", + "type": "dict[str, Any]", + "description": "Extra info.", + }, + ] + } + + # Clean up return type string + type_str = ( + type_str.replace("", "") + .replace("typing.", "") + .replace("NoneType", "None") + ) + + # Basic types handling + basic_types = ["int", "str", "dict", "bool", "float", "None", "Any"] + if type_str.lower() in [t.lower() for t in 
basic_types]: + return type_str.lower() + + # Check for container types with square brackets + container_match = re.search(r"(\w+)\[(.*?)\]", type_str) + if container_match: + container_type = container_match.group(1) + inner_type = container_match.group(2) + + inner_type_name = ( + inner_type.split(".")[-1] if "." in inner_type else inner_type + ) + + return f"{container_type}[{inner_type_name}]" + + model_name = ( + type_str.rsplit(".", maxsplit=1)[-1] if "." in type_str else type_str + ) + + return model_name + + @classmethod + def get_routers(cls, route_map: Dict[str, BaseRoute]) -> dict: + """Get router reference data. + + Parameters + ---------- + route_map : Dict[str, BaseRoute] + Dictionary containing the path and route object for the router. + + Returns + ------- + Dict[str, Dict[str, Any]] + Dictionary containing the description for each router. + """ + main_router = RouterLoader.from_extensions() + routers: dict = {} + for path in route_map: + path_parts = path.split("/") + # We start at 2: ["/", "some_router"] "/some_router" + i = 2 + p = "/".join(path_parts[:i]) + while p != path: + if p not in routers: + description = main_router.get_attr(p, "description") + if description is not None: + routers[p] = {"description": description} + # We go down the path to include sub-routers + i += 1 + p = "/".join(path_parts[:i]) + return routers diff --git a/openbb_platform/core/openbb_core/app/static/reference_loader.py b/openbb_platform/core/openbb_core/app/static/reference_loader.py new file mode 100644 index 0000000000000000000000000000000000000000..8dcd7faa676ce56d7caa0c72ec7c70fa5a2622ad --- /dev/null +++ b/openbb_platform/core/openbb_core/app/static/reference_loader.py @@ -0,0 +1,43 @@ +"""ReferenceLoader class for loading reference data from a file.""" + +import json +from pathlib import Path +from typing import Dict, Optional + +from openbb_core.app.model.abstract.singleton import SingletonMeta + + +class ReferenceLoader(metaclass=SingletonMeta): + 
"""ReferenceLoader class for loading the `reference.json` file.""" + + def __init__(self, directory: Optional[Path] = None): + """ + Initialize the ReferenceLoader with a specific directory. + + If no directory is provided, a default directory will be used. + + Attributes + ---------- + directory : Optional[Path] + The directory from which to load the assets where the reference file lives. + """ + self.directory = directory or directory or self._get_default_directory() + self._reference = self._load(self.directory / "assets" / "reference.json") + + @property + def reference(self) -> Dict[str, Dict]: + """Get the reference data.""" + return self._reference + + def _get_default_directory(self) -> Path: + """Get the default directory for loading references.""" + return Path(__file__).parents[4].resolve() / "openbb" + + def _load(self, file_path: Path): + """Load the reference data from a file.""" + try: + with open(file_path) as f: + data = json.load(f) + except FileNotFoundError: + data = {} + return data diff --git a/openbb_platform/core/openbb_core/app/static/utils/console.py b/openbb_platform/core/openbb_core/app/static/utils/console.py new file mode 100644 index 0000000000000000000000000000000000000000..4f744bcfd7c5c1a0eccc5a8e1222b93aed431ff4 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/static/utils/console.py @@ -0,0 +1,16 @@ +"""Console module.""" + +from openbb_core.env import Env + + +class Console: + """Console to be used by builder and linters.""" + + def __init__(self, verbose: bool): + """Initialize the console.""" + self.verbose = verbose + + def log(self, message: str, **kwargs): + """Console log method.""" + if self.verbose or Env().DEBUG_MODE: + print(message, **kwargs) # noqa: T201 diff --git a/openbb_platform/core/openbb_core/app/static/utils/decorators.py b/openbb_platform/core/openbb_core/app/static/utils/decorators.py new file mode 100644 index 0000000000000000000000000000000000000000..94856a1ac162f9dae0213fe65d131bdf6900a74a --- 
/dev/null +++ b/openbb_platform/core/openbb_core/app/static/utils/decorators.py @@ -0,0 +1,107 @@ +"""Decorators for the OpenBB Platform static assets.""" + +from functools import wraps +from typing import Any, Callable, Optional, TypeVar, overload + +from openbb_core.app.model.abstract.error import OpenBBError +from openbb_core.env import Env +from openbb_core.provider.utils.errors import EmptyDataError, UnauthorizedError +from pydantic import ValidationError, validate_call +from typing_extensions import ParamSpec + +P = ParamSpec("P") +R = TypeVar("R") + + +@overload +def validate(func: Callable[P, R]) -> Callable[P, R]: + pass + + +@overload +def validate(**dec_kwargs) -> Callable[[Callable[P, R]], Callable[P, R]]: + pass + + +def validate( + func: Optional[Callable[P, R]] = None, + **dec_kwargs, +) -> Any: + """Validate function calls.""" + + def decorated(f: Callable[P, R]): + """Use for decorating functions.""" + + @wraps(f) + def wrapper(*f_args, **f_kwargs): + return validate_call(f, **dec_kwargs)(*f_args, **f_kwargs) + + return wrapper + + return decorated if func is None else decorated(func) + + +def exception_handler(func: Callable[P, R]) -> Callable[P, R]: + """Handle exceptions, attempting to focus on the last call from the traceback.""" + + @wraps(func) + def wrapper(*f_args, **f_kwargs): + try: + return func(*f_args, **f_kwargs) + except (ValidationError, OpenBBError, Exception) as e: + if Env().DEBUG_MODE: + raise + + # Get the last traceback object from the exception + tb = e.__traceback__ + if tb: + while tb.tb_next is not None: + tb = tb.tb_next + + if isinstance(e, ValidationError): + error_list: list = [] + validation_error = f"{e.error_count()} validations error(s)" + for err in e.errors(include_url=False): + loc = ".".join( + [ + str(i) + for i in err.get("loc", ()) + if i + not in ( + "standard_params", + "extra_params", + "provider_choices", + ) + ] + ) + msg = err.get("msg", "") + _input = ( + "..." 
+ if msg == "Missing required argument" + else err.get("input", "") + ) + prefix = f"[Data Model] {e.title}\n" if "Data" in e.title else "" + error_list.append( + f"{prefix}[Arg] {loc} -> input: {_input} -> {msg}" + ) + error_list.insert(0, validation_error) + error_str = "\n".join(error_list) + raise OpenBBError(f"\n[Error] -> {error_str}").with_traceback( + tb + ) from None + if isinstance(e, UnauthorizedError): + raise UnauthorizedError(f"\n[Error] -> {e}").with_traceback( + tb + ) from None + if isinstance(e, EmptyDataError): + raise EmptyDataError(f"\n[Empty] -> {e}").with_traceback(tb) from None + if isinstance(e, OpenBBError): + raise OpenBBError(f"\n[Error] -> {e}").with_traceback(tb) from None + if isinstance(e, Exception): + raise OpenBBError( + f"\n[Unexpected Error] -> {e.__class__.__name__} -> {e}" + ).with_traceback(tb) from None + + return None + + return wrapper diff --git a/openbb_platform/core/openbb_core/app/static/utils/filters.py b/openbb_platform/core/openbb_core/app/static/utils/filters.py new file mode 100644 index 0000000000000000000000000000000000000000..3f2c02708428810ba8a07f0c7933360d64b00519 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/static/utils/filters.py @@ -0,0 +1,67 @@ +"""OpenBB filters.""" + +from typing import Any, Dict, Optional + +from openbb_core.app.utils import check_single_item, convert_to_basemodel + + +def filter_inputs( + data_processing: bool = False, + info: Optional[Dict[str, Dict[str, Any]]] = None, + **kwargs, +) -> dict: + """Filter command inputs.""" + for key, value in kwargs.items(): + if data_processing and key == "data": + kwargs[key] = convert_to_basemodel(value) + + if info: + # Here we check if list items are passed and multiple items allowed for + # the given provider/input combination. 
In that case we transform the list + # into a comma-separated string + provider = kwargs.get("provider_choices", {}).get("provider") + for field, properties in info.items(): + + for p in ("standard_params", "extra_params"): + if field in kwargs.get(p, {}): + current = kwargs[p][field] + new = ( + ",".join(map(str, current)) + if isinstance(current, list) + else current + ) + + provider_properties = properties.get(provider, {}) + if isinstance(provider_properties, dict): + multiple_items_allowed = provider_properties.get( + "multiple_items_allowed" + ) + elif isinstance(provider_properties, list): + # For backwards compatibility, before this was a list + multiple_items_allowed = ( + "multiple_items_allowed" in provider_properties + ) + else: + multiple_items_allowed = True + + if not multiple_items_allowed: + check_single_item( + new, + f"{field} -> multiple items not allowed for '{provider}'", + ) + + kwargs[p][field] = new + break + else: + provider = kwargs.get("provider_choices", {}).get("provider") + for param_category in ("standard_params", "extra_params"): + if param_category in kwargs: + for field, value in kwargs[param_category].items(): + if isinstance(value, list): + kwargs[param_category][field] = ",".join(map(str, value)) + check_single_item( + kwargs[param_category][field], + f"{field} -> multiple items not allowed for '{provider}'", + ) + + return kwargs diff --git a/openbb_platform/core/openbb_core/app/static/utils/linters.py b/openbb_platform/core/openbb_core/app/static/utils/linters.py new file mode 100644 index 0000000000000000000000000000000000000000..8ee234db5b3c2bee93a46749af01bfbd67aac6c0 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/static/utils/linters.py @@ -0,0 +1,62 @@ +"""Linters for the package.""" + +import shutil +import subprocess +from pathlib import Path +from typing import ( + List, + Literal, + Optional, +) + +from openbb_core.app.static.utils.console import Console +from openbb_core.env import Env + + +class Linters: 
+ """Run the linters for the Platform.""" + + def __init__(self, directory: Path, verbose: bool = False) -> None: + """Initialize the linters.""" + self.directory = directory + self.verbose = verbose + self.console = Console(verbose) + + def print_separator(self, symbol: str, length: int = 160): + """Print a separator.""" + self.console.log(symbol * length) + + def run( + self, + linter: Literal["black", "ruff"], + flags: Optional[List[str]] = None, + ): + """Run linter with flags.""" + if shutil.which(linter): + self.console.log(f"\n* {linter}") + self.print_separator("^") + + command = [linter] + if flags: + command.extend(flags) # type: ignore + subprocess.run( # noqa: S603 + command + list(self.directory.glob("*.py")), check=False + ) + + self.print_separator("-") + else: + self.console.log(f"\n* {linter} not found") + + def black(self): + """Run black.""" + flags = [] + if not self.verbose and not Env().DEBUG_MODE: + flags.append("--quiet") + self.run(linter="black", flags=flags) + + def ruff(self): + """Run ruff.""" + flags = ["check", "--fix"] + if not self.verbose and not Env().DEBUG_MODE: + flags.append("--silent") + self.run(linter="ruff", flags=flags) diff --git a/openbb_platform/core/openbb_core/app/utils.py b/openbb_platform/core/openbb_core/app/utils.py new file mode 100644 index 0000000000000000000000000000000000000000..4a46b30dd584de1addd7ab2a0d4cd9946fcefe10 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/utils.py @@ -0,0 +1,195 @@ +"""Utility functions for the OpenBB Core app.""" + +import ast +import json +from datetime import time +from typing import TYPE_CHECKING, Dict, List, Optional, Union + +from openbb_core.app.model.abstract.error import OpenBBError +from openbb_core.app.model.preferences import Preferences +from openbb_core.app.model.system_settings import SystemSettings +from openbb_core.provider.abstract.data import Data +from pydantic import ValidationError + +if TYPE_CHECKING: + # pylint: disable=import-outside-toplevel + 
from numpy import ndarray + from pandas import DataFrame, Series + + +def basemodel_to_df( + data: Union[List[Data], Data], + index: Optional[str] = None, +) -> "DataFrame": + """Convert list of BaseModel to a Pandas DataFrame.""" + # pylint: disable=import-outside-toplevel + from pandas import DataFrame, to_datetime + + if isinstance(data, list): + df = DataFrame( + [d.model_dump(exclude_none=True, exclude_unset=True) for d in data] + ) + else: + try: + df = DataFrame(data.model_dump(exclude_none=True, exclude_unset=True)) + except ValueError: + df = DataFrame( + data.model_dump(exclude_none=True, exclude_unset=True), index=["values"] + ) + + if "is_multiindex" in df.columns: + col_names = ast.literal_eval(df.multiindex_names.unique()[0]) + df = df.set_index(col_names) + df = df.drop(["is_multiindex", "multiindex_names"], axis=1) + + # If the date column contains dates only, convert them to a date to avoid encoding time data. + if "date" in df.columns: + df["date"] = df["date"].apply(to_datetime) + if all(t.time() == time(0, 0) for t in df["date"]): + df["date"] = df["date"].apply(lambda x: x.date()) + + if index and index in df.columns: + if index == "date": + df.set_index("date", inplace=True) + df.sort_index(axis=0, inplace=True) + else: + df = df.set_index(index) if index and index in df.columns else df + + return df + + +def df_to_basemodel( + df: Union["DataFrame", "Series"], index: bool = False +) -> List[Data]: + """Convert from a Pandas DataFrame to list of BaseModel.""" + # pylint: disable=import-outside-toplevel + from pandas import MultiIndex, Series, to_datetime + + is_multiindex = isinstance(df.index, MultiIndex) + + if not is_multiindex and (index or df.index.name): + df = df.reset_index() + if isinstance(df, Series): + df = df.to_frame() + + # Check if df has multiindex. 
If so, add the index names to the df and a boolean column + if isinstance(df.index, MultiIndex): + df["is_multiindex"] = True + df["multiindex_names"] = str(df.index.names) + df = df.reset_index() + + # Converting to JSON will add T00:00:00.000 to all dates with no time element unless we format it as a string first. + if "date" in df.columns: + df["date"] = df["date"].apply(to_datetime) + if all(t.time() == time(0, 0) for t in df["date"]): + df["date"] = df["date"].apply(lambda x: x.date().strftime("%Y-%m-%d")) + + return [ + Data(**d) for d in json.loads(df.to_json(orient="records", date_format="iso")) + ] + + +def list_to_basemodel(data_list: List) -> List[Data]: + """Convert a list to a list of BaseModel.""" + # pylint: disable=import-outside-toplevel + from pandas import DataFrame, Series + + base_models = [] + for item in data_list: + if isinstance(item, Data) or issubclass(type(item), Data): + base_models.append(item) + elif isinstance(item, dict): + base_models.append(Data(**item)) + elif isinstance(item, (DataFrame, Series)): + base_models.extend(df_to_basemodel(item)) + else: + raise ValueError(f"Unsupported list item type: {type(item)}") + return base_models + + +def dict_to_basemodel(data_dict: Dict) -> Data: + """Convert a dictionary to BaseModel.""" + try: + return Data(**data_dict) + except ValidationError as e: + raise ValueError( + f"Validation error when converting dict to BaseModel: {e}" + ) from e + + +def ndarray_to_basemodel(array: "ndarray") -> List[Data]: + """Convert a NumPy array to list of BaseModel.""" + # Assuming a 2D array where rows are records + if array.ndim != 2: + raise ValueError("Only 2D arrays are supported.") + return [ + Data(**{f"column_{i}": value for i, value in enumerate(row)}) for row in array + ] + + +def convert_to_basemodel(data) -> Union[Data, List[Data]]: + """Dispatch function to convert different types to BaseModel.""" + # pylint: disable=import-outside-toplevel + from numpy import ndarray + from pandas import 
DataFrame, Series + + if isinstance(data, Data) or issubclass(type(data), Data): + return data + if isinstance(data, list): + return list_to_basemodel(data) + if isinstance(data, dict): + return dict_to_basemodel(data) + if isinstance(data, (DataFrame, Series)): + return df_to_basemodel(data) + if isinstance(data, ndarray): + return ndarray_to_basemodel(data) + raise ValueError(f"Unsupported data type: {type(data)}") + + +def get_target_column(df: "DataFrame", target: str) -> "Series": + """Get target column from time series data.""" + if target not in df.columns: + choices = ", ".join(df.columns) + raise ValueError( + f"Target column '{target}' not found in data. Choose from {choices}" + ) + return df[target] + + +def get_target_columns(df: "DataFrame", target_columns: List[str]) -> "DataFrame": + """Get target columns from time series data.""" + # pylint: disable=import-outside-toplevel + from pandas import DataFrame + + df_result = DataFrame() + for target in target_columns: + df_result[target] = get_target_column(df, target).to_frame() + return df_result + + +def get_user_cache_directory() -> str: + """Get user cache directory.""" + file = SystemSettings().model_dump()["user_settings_path"] + + with open(file) as settings_file: + contents = settings_file.read() + + try: + settings = json.loads(contents)["preferences"] + except KeyError: + settings = None + cache_dir = ( + settings["cache_directory"] + if settings and "cache_directory" in settings + else Preferences().cache_directory + ) + return cache_dir + + +def check_single_item( + value: Optional[str], message: Optional[str] = None +) -> Optional[str]: + """Check that string contains a single item.""" + if value and isinstance(value, str) and ("," in value or ";" in value): + raise OpenBBError(message if message else "multiple items not allowed") + return value diff --git a/openbb_platform/core/openbb_core/app/version.py b/openbb_platform/core/openbb_core/app/version.py new file mode 100644 index 
0000000000000000000000000000000000000000..84bf3f555598772351b05dd6edc9ae69716c4fd4 --- /dev/null +++ b/openbb_platform/core/openbb_core/app/version.py @@ -0,0 +1,69 @@ +"""Version script for the OpenBB Platform.""" + +from importlib.metadata import ( + PackageNotFoundError, + version as pkg_version, +) +from pathlib import Path + +PACKAGE = "openbb" + + +def get_package_version(package: str): + """Retrieve the version of a package from installed pip packages.""" + is_nightly = False + try: + version = pkg_version(package) + except PackageNotFoundError: + package += "-nightly" + is_nightly = True + try: + version = pkg_version(package) + except PackageNotFoundError: + package = "openbb-core" + version = pkg_version(package) + version += "core" + + if is_git_repo(Path(__file__).parent.resolve()) and not is_nightly: + version += "dev" + + return version + + +def is_git_repo(path: Path): + """Check if the given directory is a git repository.""" + # pylint: disable=import-outside-toplevel + import shutil + import subprocess + + git_executable = shutil.which("git") + if not git_executable: + return False + try: + subprocess.run( # noqa: S603 + [git_executable, "rev-parse", "--is-inside-work-tree"], + cwd=path, + stdout=subprocess.DEVNULL, + stderr=subprocess.DEVNULL, + check=True, + ) + return True + except subprocess.CalledProcessError: + return False + + +def get_major_minor(version: str) -> tuple[int, int]: + """Retrieve the major and minor version from a version string.""" + parts = version.split(".") + return (int(parts[0]), int(parts[1])) + + +try: + VERSION = get_package_version(PACKAGE) +except PackageNotFoundError: + VERSION = "unknown" + +try: + CORE_VERSION = get_package_version("openbb-core") +except PackageNotFoundError: + CORE_VERSION = "unknown" diff --git a/openbb_platform/core/openbb_core/build.py b/openbb_platform/core/openbb_core/build.py new file mode 100644 index 0000000000000000000000000000000000000000..af1cb731f20e6420244867e28c8bc70311553305 --- 
/dev/null +++ b/openbb_platform/core/openbb_core/build.py @@ -0,0 +1,96 @@ +"""Script to build the OpenBB platform static assets.""" + +# flake8: noqa: S603 +# pylint: disable=import-outside-toplevel,unused-import +import logging +import subprocess +import sys + +logger = logging.getLogger(__name__) +logger.setLevel(logging.INFO) +handler = logging.StreamHandler() +handler.setLevel(logging.INFO) +formatter = logging.Formatter("%(message)s") +handler.setFormatter(formatter) +logger.addHandler(handler) + + +def main(): + """Build the OpenBB platform static assets.""" + try: + logger.info("Attempting to import the OpenBB package...\n") + # Try importing openbb in a subprocess and capture output + result = subprocess.run( + [sys.executable, "-c", "import openbb"], + capture_output=True, + text=True, + check=True, + ) + logger.info(result.stdout) + building_found = any( + line.startswith("Building") for line in result.stdout.splitlines() + ) + + if result.returncode != 0: + raise ModuleNotFoundError(result.stderr) + + except ModuleNotFoundError: + logger.info( + "\nOpenBB build script not found, installing from PyPI...\n", + ) + subprocess.run( + [sys.executable, "-m", "pip", "install", "openbb", "--no-deps"], + check=True, + ) + + try: + result = subprocess.run( + [ + sys.executable, + "-c", + "import openbb", + ], + capture_output=True, + text=True, + check=True, + ) + logger.info(result.stdout) + building_found = any( + line.startswith("Building") for line in result.stdout.splitlines() + ) + except Exception as e: + raise RuntimeError(f"Failed to import the OpenBB package. \n{e}") from e + + if not building_found: + logger.info("Did not build on import, triggering rebuild...\n") + try: + import openbb # noqa + + openbb.build() + + except Exception as e: + raise RuntimeError( # noqa + "Failed to build the OpenBB platform static assets. 
\n" + f"{e} -> {e.__traceback__.tb_frame.f_code.co_filename}:" # type:ignore # pylint: disable=E1101 + f"{e.__traceback__.tb_lineno}" # type:ignore + if hasattr(e, "__traceback__") + and hasattr(e.__traceback__, "tb_frame") # type:ignore + and hasattr( + e.__traceback__.tb_frame, # type:ignore + "f_code", + ) + and hasattr( + e.__traceback__.tb_frame.f_code, # type:ignore # pylint: disable=E1101 + "co_filename", + ) + and hasattr( + e.__traceback__, # type:ignore + "tb_lineno", + ) + else f"Failed to build the OpenBB platform static assets. \n{e}" + ) from e + sys.exit(0) + + +if __name__ == "__main__": + main() diff --git a/openbb_platform/core/openbb_core/env.py b/openbb_platform/core/openbb_core/env.py new file mode 100644 index 0000000000000000000000000000000000000000..afd03b9e3ad62bd203923d8d44cdeb33b789a13a --- /dev/null +++ b/openbb_platform/core/openbb_core/env.py @@ -0,0 +1,71 @@ +"""Environment variables.""" + +import os +from pathlib import Path +from typing import Dict, Optional + +import dotenv +from openbb_core.app.constants import OPENBB_DIRECTORY +from openbb_core.app.model.abstract.singleton import SingletonMeta + + +class Env(metaclass=SingletonMeta): + """Environment variables.""" + + _environ: Dict[str, str] + + def __init__(self) -> None: + """Initialize the environment.""" + dotenv.load_dotenv(Path(OPENBB_DIRECTORY, ".env")) + self._environ = os.environ.copy() + + @property + def API_AUTH(self) -> bool: + """API authentication: enables API endpoint authentication.""" + return self.str2bool(self._environ.get("OPENBB_API_AUTH", False)) + + @property + def API_USERNAME(self) -> Optional[str]: + """API username: sets API username.""" + return self._environ.get("OPENBB_API_USERNAME", None) + + @property + def API_PASSWORD(self) -> Optional[str]: + """API password: sets API password.""" + return self._environ.get("OPENBB_API_PASSWORD", None) + + @property + def API_AUTH_EXTENSION(self) -> Optional[str]: + """Auth extension: specifies which 
authentication extension to use.""" + return self._environ.get("OPENBB_API_AUTH_EXTENSION", None) + + @property + def AUTO_BUILD(self) -> bool: + """Automatic build: enables automatic package build on import.""" + return self.str2bool(self._environ.get("OPENBB_AUTO_BUILD", True)) + + @property + def DEBUG_MODE(self) -> bool: + """Debug mode: enables debug mode.""" + return self.str2bool(self._environ.get("OPENBB_DEBUG_MODE", False)) + + @property + def DEV_MODE(self) -> bool: + """Dev mode: enables development mode.""" + return self.str2bool(self._environ.get("OPENBB_DEV_MODE", False)) + + @property + def HUB_BACKEND(self) -> str: + """Hub backend: sets the backend for the OpenBB Hub.""" + return self._environ.get("OPENBB_HUB_BACKEND", "https://payments.openbb.co") + + @staticmethod + def str2bool(value) -> bool: + """Match a value to its boolean correspondent.""" + if isinstance(value, bool): + return value + if value.lower() in {"false", "f", "0", "no", "n"}: + return False + if value.lower() in {"true", "t", "1", "yes", "y"}: + return True + raise ValueError(f"Failed to cast {value} to bool.") diff --git a/openbb_platform/core/openbb_core/provider/__init__.py b/openbb_platform/core/openbb_core/provider/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..096b4790bb308a54c3bdf0fd231661b97a6fc0ae --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/__init__.py @@ -0,0 +1,4 @@ +"""OpenBB Provider Package.""" + +from . 
"""The OpenBB Standardized Data Model."""

from typing import Dict

from pydantic import (
    AliasGenerator,
    BaseModel,
    BeforeValidator,
    ConfigDict,
    alias_generators,
    model_validator,
)
from typing_extensions import Annotated


def check_int(v: int) -> int:
    """Coerce the value to an int.

    Raises TypeError when the value cannot be converted. Note that floats
    are truncated by int().
    """
    try:
        return int(v)
    except (ValueError, TypeError) as exc:
        # int("abc") raises ValueError while int(None)/int([]) raise TypeError;
        # normalize both so validation always surfaces the same error type.
        raise TypeError("value must be an int") from exc


# Pydantic-annotated int that coerces compatible inputs (e.g. "42") before
# validation via check_int.
ForceInt = Annotated[int, BeforeValidator(check_int)]


class Data(BaseModel):
    """
    The OpenBB Standardized Data Model.

    The `Data` class is a flexible Pydantic model designed to accommodate various data structures
    for OpenBB's data processing pipeline as it's structured to support dynamic field definitions.

    The model leverages Pydantic's powerful validation features to ensure data integrity while
    providing the flexibility to handle extra fields that are not explicitly defined in the model's
    schema. This makes the `Data` class ideal for working with datasets that may have varying
    structures or come from heterogeneous sources.

    Key Features:
    - Dynamic field support: Can dynamically handle fields that are not pre-defined in the model,
      allowing for great flexibility in dealing with different data shapes.
    - Alias handling: Utilizes an aliasing mechanism to maintain compatibility with different naming
      conventions across various data formats.

    Usage:
    The `Data` class can be instantiated with keyword arguments corresponding to the fields of the
    expected data. It can also parse and validate data from JSON or other serializable formats, and
    convert them to a `Data` instance for easy manipulation and access.

    Example:
        # Direct instantiation
        data_record = Data(name="OpenBB", value=42)

        # Conversion from a dictionary
        data_dict = {"name": "OpenBB", "value": 42}
        data_record = Data(**data_dict)

    The class is highly extensible and can be subclassed to create more specific models tailored to
    particular datasets or domains, while still benefiting from the base functionality provided by
    the `Data` class.

    Attributes:
        __alias_dict__ (Dict[str, str]):
            A dictionary that maps field names to their aliases,
            facilitating the use of different naming conventions.
        model_config (ConfigDict):
            A configuration dictionary that defines the model's behavior,
            such as accepting extra fields, populating by name, and alias
            generation.
    """

    __alias_dict__: Dict[str, str] = {}

    def __repr__(self):
        """Return a string representation of the object."""
        return f"{self.__class__.__name__}({', '.join([f'{k}={v}' for k, v in super().model_dump().items()])})"

    model_config = ConfigDict(
        extra="allow",
        populate_by_name=True,
        strict=False,
        alias_generator=AliasGenerator(
            validation_alias=alias_generators.to_camel,
            serialization_alias=alias_generators.to_snake,
        ),
    )

    @model_validator(mode="before")
    @classmethod
    def _use_alias(cls, values):
        """Use alias for error locs.

        Remaps incoming keys through the inverse of __alias_dict__ so that
        validation errors point at the canonical field names.
        """
        # Invert the alias dict: provider key -> model field name.
        aliases = {orig: alias for alias, orig in cls.__alias_dict__.items()}
        if aliases and isinstance(values, dict):
            return {aliases.get(k, k): v for k, v in values.items()}

        return values
class classproperty:
    """Class property decorator.

    Minimal descriptor exposing a method as a read-only attribute on the
    class object itself (``cls.attr``), rather than on instances.
    """

    def __init__(self, f):
        """Initialize decorator."""
        self.f = f

    def __get__(self, obj, owner):
        """Get the property."""
        # `owner` is the class through which the attribute was accessed;
        # the instance (`obj`) is intentionally ignored.
        return self.f(owner)


class Fetcher(Generic[Q, R]):
    """Abstract class for the fetcher.

    Subclasses implement the TET pipeline: transform_query -> extract_data
    (or aextract_data) -> transform_data. Q is the query-params type and R
    the return type (often List[D]).
    """

    # Tell query executor if credentials are required. Can be overridden by subclasses.
    require_credentials = True

    @staticmethod
    def transform_query(params: Dict[str, Any]) -> Q:
        """Transform the params to the provider-specific query."""
        raise NotImplementedError

    @staticmethod
    async def aextract_data(query: Q, credentials: Optional[Dict[str, str]]) -> Any:
        """Asynchronously extract the data from the provider."""

    @staticmethod
    def extract_data(query: Q, credentials: Optional[Dict[str, str]]) -> Any:
        """Extract the data from the provider."""

    @staticmethod
    def transform_data(query: Q, data: Any, **kwargs) -> Union[R, AnnotatedResult[R]]:
        """Transform the provider-specific data."""
        raise NotImplementedError

    def __init_subclass__(cls, *args, **kwargs):
        """Initialize the subclass.

        Enforces that a subclass overrides at least one of extract_data /
        aextract_data; when the async variant is provided it is installed as
        extract_data so callers have a single entry point.
        """
        super().__init_subclass__(*args, **kwargs)

        if cls.aextract_data != Fetcher.aextract_data:
            cls.extract_data = cls.aextract_data  # type: ignore[method-assign]
        elif cls.extract_data == Fetcher.extract_data:
            raise NotImplementedError(
                "Fetcher subclass must implement either extract_data or aextract_data"
                " method. If both are implemented, aextract_data will be used as the"
                " default."
            )

    @classmethod
    async def fetch_data(
        cls,
        params: Dict[str, Any],
        credentials: Optional[Dict[str, str]] = None,
        **kwargs,
    ) -> Union[R, AnnotatedResult[R]]:
        """Fetch data from a provider.

        Runs the full TET pipeline; extract_data may be sync or async
        (maybe_coroutine handles both).
        """
        query = cls.transform_query(params=params)
        data = await maybe_coroutine(
            cls.extract_data, query=query, credentials=credentials, **kwargs
        )
        return cls.transform_data(query=query, data=data, **kwargs)

    @classproperty
    def query_params_type(self) -> Q:
        """Get the type of query."""
        # Read Q from the Generic[Q, R] parametrization of the subclass.
        # pylint: disable=E1101
        return self.__orig_bases__[0].__args__[0]  # type: ignore

    @classproperty
    def return_type(self) -> R:
        """Get the type of return."""
        # Read R from the Generic[Q, R] parametrization of the subclass.
        # pylint: disable=E1101
        return self.__orig_bases__[0].__args__[1]  # type: ignore

    @classproperty
    def data_type(self) -> D:  # type: ignore
        """Get the type data."""
        # pylint: disable=E1101
        return self._get_data_type(self.__orig_bases__[0].__args__[1])  # type: ignore

    @staticmethod
    def _get_data_type(data: Any) -> D:  # type: ignore
        """Get the type of the data.

        Unwraps List[D] to D; anything else is returned unchanged.
        """
        if get_origin(data) is list:
            data = get_args(data)[0]
        return data

    @classmethod
    def test(
        cls,
        params: Dict[str, Any],
        credentials: Optional[Dict[str, str]] = None,
        **kwargs,
    ) -> None:
        """Test the fetcher.

        This method will test each stage of the fetcher TET (Transform, Extract, Transform).

        Parameters
        ----------
        params : Dict[str, Any]
            The params to test the fetcher with.
        credentials : Optional[Dict[str, str]], optional
            The credentials to test the fetcher with, by default None.

        Raises
        ------
        AssertionError
            If any of the tests fail.
        """
        # pylint: disable=import-outside-toplevel
        from pandas import DataFrame

        # Run the pipeline synchronously (run_async bridges async extractors).
        query = cls.transform_query(params=params)
        data = run_async(
            cls.extract_data, query=query, credentials=credentials, **kwargs
        )
        result = cls.transform_data(query=query, data=data, **kwargs)

        # Class Assertions
        assert isinstance(
            cls.require_credentials, bool
        ), "require_credentials must be a boolean."

        # Query Assertions
        assert query, "Query must not be None."
        assert issubclass(
            type(query), cls.query_params_type
        ), f"Query type mismatch. Expected: {cls.query_params_type} Got: {type(query)}"
        assert all(
            getattr(query, key) == value for key, value in params.items()
        ), f"Query must have the correct values. Expected: {params} Got: {query.__dict__}"

        # Data Assertions
        # DataFrames define truthiness differently, so emptiness is checked
        # via .empty instead of bool().
        if not isinstance(data, DataFrame):
            assert data, "Data must not be None."
        else:
            assert not data.empty, "Data must not be empty."
        is_list = isinstance(data, list)
        if is_list:
            assert all(
                field in data[0]
                for field in cls.data_type.model_fields
                if field in data[0]
            ), f"Data must have the correct fields. Expected: {cls.data_type.model_fields} Got: {data[0].__dict__}"
            # This makes sure that the data is not transformed yet so that the
            # pipeline is implemented correctly. We can remove this assertion if we
            # want to be less strict.
            assert (
                issubclass(type(data[0]), cls.data_type) is False
            ), f"Data must not be transformed yet. Expected: {cls.data_type} Got: {type(data[0])}"
        else:
            assert all(
                field in data for field in cls.data_type.model_fields if field in data
            ), f"Data must have the correct fields. Expected: {cls.data_type.model_fields} Got: {data.__dict__}"
            assert (
                issubclass(type(data), cls.data_type) is False
            ), f"Data must not be transformed yet. Expected: {cls.data_type} Got: {type(data)}"

        assert len(data) > 0, "Data must not be empty."

        # Transformed Data Assertions
        transformed_data = (
            result.result if isinstance(result, AnnotatedResult) else result
        )

        assert transformed_data, "Transformed data must not be None."

        if isinstance(transformed_data, list):
            return_type_args = cls.return_type.__args__[0]
            # Handle return types shaped like Dict[str, List[D]] as well as
            # the plain List[D] case.
            return_type_is_dict = (
                hasattr(return_type_args, "__origin__")
                and return_type_args.__origin__ is dict
            )
            if return_type_is_dict:
                return_type_fields = (
                    return_type_args.__args__[1].__args__[0].model_fields
                )
                return_type = return_type_args.__args__[1].__args__[0]
            else:
                return_type_fields = return_type_args.model_fields
                return_type = return_type_args

            assert len(transformed_data) > 0, "Transformed data must not be empty."  # type: ignore
            assert all(
                field in transformed_data[0].__dict__ for field in return_type_fields  # type: ignore
            ), f"Transformed data must have the correct fields. Expected: {return_type_fields} Got: {transformed_data[0].__dict__}"  # type: ignore
            assert issubclass(
                type(transformed_data[0]), cls.data_type  # type: ignore
            ), f"Transformed data must be of the correct type. Expected: {cls.data_type} Got: {type(transformed_data[0])}"  # type: ignore
            assert issubclass(  # type: ignore
                type(transformed_data[0]),  # type: ignore
                return_type,
            ), f"Transformed data must be of the correct type. Expected: {return_type} Got: {type(transformed_data[0])}"  # type: ignore
        else:
            assert all(
                field in transformed_data.__dict__
                for field in cls.return_type.model_fields
            ), f"Transformed data must have the correct fields. Expected: {cls.return_type.model_fields} Got: {transformed_data.__dict__}"
            assert issubclass(
                type(transformed_data), cls.data_type
            ), f"Transformed data must be of the correct type. Expected: {cls.data_type} Got: {type(transformed_data)}"
            assert issubclass(
                type(transformed_data), cls.return_type
            ), f"Transformed data must be of the correct type. Expected: {cls.return_type} Got: {type(transformed_data)}"
class QueryParams(BaseModel):
    """The OpenBB Standardized QueryParams Model.

    Holds query input parameters. Providers extend this model and fetchers
    use it when building data provider requests.

    Key Features:
    - Alias handling: field names can be mapped to provider-specific names
      via `__alias_dict__`; the mapping is applied only when `model_dump`
      runs.
    - Json schema extra merging: each provider may tag fields with extra
      json-schema properties via `__json_schema_extra__`, and the registry
      merges them per provider.

      Example:
          FMP fetcher:
              __json_schema_extra__ = {"symbol": {"multiple_items_allowed": True}}
          Intrinio fetcher:
              __json_schema_extra__ = {"symbol": {"multiple_items_allowed": False}}

          Produces in the `symbol` schema:
              {
                  "type": "string",
                  "description": "Symbol to get data for.",
                  "fmp": {"multiple_items_allowed": True},
                  "intrinio": {"multiple_items_allowed": False}
                  ...,
              }

      Multiple fields can be tagged with the same or multiple properties.
      Example:
          __json_schema_extra__ = {
              "": {"foo": 123, "bar": 456},
              "": {"foo": 789}
          }

    Attributes:
        __alias_dict__ (Dict[str, str]):
            Maps field names to their aliases, applied on `model_dump`.
        __json_schema_extra__ (Dict[str, List[str]]):
            Properties to be included in the json schema extra.
        model_config (ConfigDict):
            Model behavior configuration: extra fields are allowed and
            population by field name is enabled.
    """

    __alias_dict__: Dict[str, str] = {}
    __json_schema_extra__: Dict[str, Any] = {}

    def __repr__(self):
        """Return the string representation of the QueryParams object."""
        rendered = ", ".join(
            f"{name}={value}" for name, value in self.model_dump().items()
        )
        return f"{self.__class__.__name__}({rendered})"

    model_config = ConfigDict(extra="allow", populate_by_name=True)

    def model_dump(self, *args, **kwargs):
        """Dump the model, renaming keys according to `__alias_dict__`."""
        dumped = super().model_dump(*args, **kwargs)
        if not self.__alias_dict__:
            return dumped
        rename = self.__alias_dict__.get
        return {rename(field, field): value for field, value in dumped.items()}
"""Query executor module."""

from typing import Any, Dict, Optional, Type

from openbb_core.app.model.abstract.error import OpenBBError
from openbb_core.provider.abstract.fetcher import Fetcher
from openbb_core.provider.abstract.provider import Provider
from openbb_core.provider.registry import Registry, RegistryLoader
from pydantic import SecretStr


class QueryExecutor:
    """Class to execute queries from providers."""

    def __init__(self, registry: Optional[Registry] = None) -> None:
        """Initialize the query executor, loading the registry if not given."""
        self.registry = registry or RegistryLoader.from_extensions()

    def get_provider(self, provider_name: str) -> Provider:
        """Get a provider from the registry.

        Raises OpenBBError when the provider is not registered.
        """
        name = provider_name.lower()
        if name not in self.registry.providers:
            # Note the trailing space: adjacent f-strings are concatenated.
            raise OpenBBError(
                f"Provider '{name}' not found in the registry. "
                f"Available providers: {list(self.registry.providers.keys())}"
            )
        return self.registry.providers[name]

    def get_fetcher(self, provider: Provider, model_name: str) -> Type[Fetcher]:
        """Get a fetcher from a provider.

        Raises OpenBBError when the provider has no fetcher for the model.
        """
        if model_name not in provider.fetcher_dict:
            raise OpenBBError(
                f"Fetcher not found for model '{model_name}' in provider '{provider.name}'."
            )
        return provider.fetcher_dict[model_name]

    @staticmethod
    def filter_credentials(
        credentials: Optional[Dict[str, SecretStr]],
        provider: Provider,
        require_credentials: bool,
    ) -> Dict[str, str]:
        """Filter credentials and check if they match provider requirements.

        Returns only the credentials the provider declares, with secret
        values unwrapped. Raises OpenBBError when a required credential is
        missing or empty and `require_credentials` is True.
        """
        filtered_credentials = {}

        if provider.credentials:
            if credentials is None:
                credentials = {}

            for c in provider.credentials:
                v = credentials.get(c)
                secret = v.get_secret_value() if v else None
                # Absent keys and empty secrets are both treated as missing.
                if not secret:
                    if require_credentials:
                        website = provider.website or ""
                        extra_msg = f" Check {website} to get it." if website else ""
                        raise OpenBBError(
                            f"Missing credential '{c}'.{extra_msg} Learn more about how to set provider "
                            "credentials at https://docs.openbb.co/platform/getting_started/api_keys."
                        )
                else:
                    filtered_credentials[c] = secret

        return filtered_credentials

    async def execute(
        self,
        provider_name: str,
        model_name: str,
        params: Dict[str, Any],
        credentials: Optional[Dict[str, SecretStr]] = None,
        **kwargs: Any,
    ) -> Any:
        """Execute query.

        Parameters
        ----------
        provider_name : str
            Name of the provider, for example: "fmp".
        model_name : str
            Name of the model, for example: "EquityHistorical".
        params : Dict[str, Any]
            Query parameters, for example: {"symbol": "AAPL"}
        credentials : Optional[Dict[str, SecretStr]], optional
            Credentials for the provider, by default None
            For example, {"fmp_api_key": SecretStr("1234")}.

        Returns
        -------
        Any
            Query result.
        """
        provider = self.get_provider(provider_name)
        fetcher = self.get_fetcher(provider, model_name)
        filtered_credentials = self.filter_credentials(
            credentials, provider, fetcher.require_credentials
        )
        return await fetcher.fetch_data(params, filtered_credentials, **kwargs)
"""Provider registry map."""

from copy import deepcopy
from inspect import getfile, isclass
from pathlib import Path
from typing import Any, Dict, List, Literal, Optional, Tuple, get_origin

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.fetcher import Fetcher
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.registry import Registry, RegistryLoader
from pydantic import BaseModel

MapType = Dict[str, Dict[str, Dict[str, Dict[str, Any]]]]

STANDARD_MODELS_FOLDER = Path(__file__).parent / "standard_models"
# Class *names* to skip when walking a model's MRO.
SKIP = {"object", "Representation", "BaseModel", "QueryParams", "Data"}


class RegistryMap:
    """Class to store information about providers in the registry."""

    def __init__(self, registry: Optional[Registry] = None) -> None:
        """Initialize Registry Map."""
        self._registry = registry or RegistryLoader.from_extensions()
        self._credentials = self._get_credentials(self._registry)
        self._available_providers = self._get_available_providers(self._registry)
        self._standard_extra, self._original_models = self._get_maps(self._registry)
        self._models = self._get_models(self._standard_extra)

    @property
    def registry(self) -> Registry:
        """Get the registry."""
        return self._registry

    @property
    def available_providers(self) -> List[str]:
        """Get list of available providers."""
        return self._available_providers

    @property
    def credentials(self) -> Dict[str, List[str]]:
        """Get map of providers to credentials."""
        return self._credentials

    @property
    def standard_extra(self) -> MapType:
        """Get standard extra map."""
        return self._standard_extra

    @property
    def original_models(self) -> MapType:
        """Get original models."""
        return self._original_models

    @property
    def models(self) -> List[str]:
        """Get available models."""
        return self._models

    def _get_credentials(self, registry: Registry) -> Dict[str, List[str]]:
        """Get map of providers to credentials."""
        return {
            name: provider.credentials for name, provider in registry.providers.items()
        }

    def _get_available_providers(self, registry: Registry) -> List[str]:
        """Get list of available providers."""
        return sorted(list(registry.providers.keys()))

    def _get_maps(self, registry: Registry) -> Tuple[MapType, Dict[str, Dict]]:
        """Generate map for the provider package."""
        standard_extra: MapType = {}
        original_models: Dict[str, Dict] = {}

        for p in registry.providers:
            for model_name, fetcher in registry.providers[p].fetcher_dict.items():
                standard_query, extra_query = self._extract_info(
                    fetcher, "query_params"
                )
                standard_data, extra_data = self._extract_info(fetcher, "data")
                if model_name not in standard_extra:
                    standard_extra[model_name] = {}
                    # The deepcopy avoids modifications from one model to affect another
                    standard_extra[model_name]["openbb"] = {
                        "QueryParams": deepcopy(standard_query),
                        "Data": deepcopy(standard_data),
                    }
                standard_extra[model_name][p] = {
                    "QueryParams": extra_query,
                    "Data": extra_data,
                }

                original_models.setdefault(model_name, {}).update(
                    {
                        p: {
                            "query": self._get_model(fetcher, "query_params"),
                            "data": self._get_model(fetcher, "data"),
                            "results_type": self._get_results_type(fetcher),
                        }
                    }
                )

                self._update_json_schema_extra(p, fetcher, standard_extra[model_name])

        return standard_extra, original_models

    def _update_json_schema_extra(
        self,
        provider: str,
        fetcher: Fetcher,
        model_map: dict,
    ):
        """Merge json schema extra for different providers."""
        model: BaseModel = RegistryMap._get_model(fetcher, "query_params")
        standard_fields = model_map["openbb"]["QueryParams"]["fields"]
        extra_fields = model_map[provider]["QueryParams"]["fields"]

        for field, properties in getattr(model, "__json_schema_extra__", {}).items():
            if properties:
                if field in standard_fields:
                    model_field = standard_fields[field]
                elif field in extra_fields:
                    model_field = extra_fields[field]
                else:
                    continue

                if model_field.json_schema_extra is None:
                    model_field.json_schema_extra = {}

                # Namespace the extra properties under the provider name.
                model_field.json_schema_extra[provider] = properties

    def _get_models(self, map_: MapType) -> List[str]:
        """Get available models."""
        return list(map_.keys())

    @staticmethod
    def _get_results_type(fetcher: Fetcher) -> Any:
        """Extract return info from fetcher."""
        return get_origin(getattr(fetcher, "return_type", None))

    @staticmethod
    def _extract_info(
        fetcher: Fetcher, type_: Literal["query_params", "data"]
    ) -> tuple:
        """Extract info (fields and docstring) from fetcher query params or data."""
        model: BaseModel = RegistryMap._get_model(fetcher, type_)
        standard_info: Dict[str, Any] = {"fields": {}, "docstring": None}
        extra_info: Dict[str, Any] = {"fields": {}, "docstring": model.__doc__}
        found_first_standard = False

        family = RegistryMap._get_class_family(model)
        for i, child in enumerate(family):
            if child.__name__ in SKIP:
                continue

            # SKIP holds class names, so compare __name__ (comparing the class
            # object itself against the string set would never match, making
            # the BaseModel fallback unreachable).
            parent = family[i + 1] if family[i + 1].__name__ not in SKIP else BaseModel

            fields = {
                name: field
                for name, field in child.model_fields.items()
                # This ensures fields inherited by c are discarded.
                # We need to compare child and parent __annotations__
                # because this attribute is redirected to the parent class
                # when the child simply inherits the parent and does not
                # define any attributes.
                # TLDR: Only fields defined in c are included
                if name in child.__annotations__
                and child.__annotations__ is not parent.__annotations__
            }

            if Path(getfile(child)).parent == STANDARD_MODELS_FOLDER:
                if not found_first_standard:
                    # If standard uses inheritance we just use the first docstring
                    standard_info["docstring"] = child.__doc__
                    found_first_standard = True
                standard_info["fields"].update(fields)
            else:
                extra_info["fields"].update(fields)

        return standard_info, extra_info

    @staticmethod
    def _get_model(
        fetcher: Fetcher, type_: Literal["query_params", "data"]
    ) -> BaseModel:
        """Get model from fetcher."""
        model = getattr(fetcher, f"{type_}_type")
        RegistryMap._validate(model, type_)
        return model

    @staticmethod
    def _validate(model: Any, type_: Literal["query_params", "data"]) -> None:
        """Validate model."""
        parent_model = QueryParams if type_ == "query_params" else Data
        if not isclass(model) or not issubclass(model, parent_model):
            model_str = str(model).replace("<", "<'").replace(">", "'>")
            raise ValueError(
                f"'{model_str}' must be a subclass of '{parent_model.__name__}'.\n"
                "If you are returning a nested type, try specifying"
                f" `{type_}_type = <'your_{type_}_type'>` in the fetcher."
            )

    @staticmethod
    def _get_class_family(class_) -> tuple:
        """Return the class family starting with the class itself until `object`."""
        return getattr(class_, "__mro__", ())
json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + title: Optional[str] = Field( + default=None, + description="Title of the series.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/analyst_estimates.py b/openbb_platform/core/openbb_core/provider/standard_models/analyst_estimates.py new file mode 100644 index 0000000000000000000000000000000000000000..824b596835de532919dd28fe3125aea389ba4e45 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/analyst_estimates.py @@ -0,0 +1,91 @@ +"""Analyst Estimates Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data, ForceInt +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class AnalystEstimatesQueryParams(QueryParams): + """Analyst Estimates Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class AnalystEstimatesData(Data): + """Analyst Estimates data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + estimated_revenue_low: Optional[ForceInt] = Field( + default=None, description="Estimated revenue low." + ) + estimated_revenue_high: Optional[ForceInt] = Field( + default=None, description="Estimated revenue high." + ) + estimated_revenue_avg: Optional[ForceInt] = Field( + default=None, description="Estimated revenue average." + ) + estimated_sga_expense_low: Optional[ForceInt] = Field( + default=None, description="Estimated SGA expense low." 
"""Analyst Estimates Standard Model."""

from datetime import date as dateType
from typing import Optional

from openbb_core.provider.abstract.data import Data, ForceInt
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import (
    DATA_DESCRIPTIONS,
    QUERY_DESCRIPTIONS,
)
from pydantic import Field, field_validator


class AnalystEstimatesQueryParams(QueryParams):
    """Analyst Estimates Query."""

    symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", ""))

    @field_validator("symbol", mode="before", check_fields=False)
    @classmethod
    def to_upper(cls, v: str) -> str:
        """Convert field to uppercase."""
        return v.upper()


class AnalystEstimatesData(Data):
    """Analyst Estimates data."""

    symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", ""))
    date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", ""))
    # Low / high / average analyst projections per line item; monetary
    # estimates use ForceInt, per-share (EPS) estimates stay float.
    estimated_revenue_low: Optional[ForceInt] = Field(description="Estimated revenue low.", default=None)
    estimated_revenue_high: Optional[ForceInt] = Field(description="Estimated revenue high.", default=None)
    estimated_revenue_avg: Optional[ForceInt] = Field(description="Estimated revenue average.", default=None)
    estimated_sga_expense_low: Optional[ForceInt] = Field(description="Estimated SGA expense low.", default=None)
    estimated_sga_expense_high: Optional[ForceInt] = Field(description="Estimated SGA expense high.", default=None)
    estimated_sga_expense_avg: Optional[ForceInt] = Field(description="Estimated SGA expense average.", default=None)
    estimated_ebitda_low: Optional[ForceInt] = Field(description="Estimated EBITDA low.", default=None)
    estimated_ebitda_high: Optional[ForceInt] = Field(description="Estimated EBITDA high.", default=None)
    estimated_ebitda_avg: Optional[ForceInt] = Field(description="Estimated EBITDA average.", default=None)
    estimated_ebit_low: Optional[ForceInt] = Field(description="Estimated EBIT low.", default=None)
    estimated_ebit_high: Optional[ForceInt] = Field(description="Estimated EBIT high.", default=None)
    estimated_ebit_avg: Optional[ForceInt] = Field(description="Estimated EBIT average.", default=None)
    estimated_net_income_low: Optional[ForceInt] = Field(description="Estimated net income low.", default=None)
    estimated_net_income_high: Optional[ForceInt] = Field(description="Estimated net income high.", default=None)
    estimated_net_income_avg: Optional[ForceInt] = Field(description="Estimated net income average.", default=None)
    estimated_eps_avg: Optional[float] = Field(description="Estimated EPS average.", default=None)
    estimated_eps_high: Optional[float] = Field(description="Estimated EPS high.", default=None)
    estimated_eps_low: Optional[float] = Field(description="Estimated EPS low.", default=None)
    number_analyst_estimated_revenue: Optional[ForceInt] = Field(description="Number of analysts who estimated revenue.", default=None)
    number_analysts_estimated_eps: Optional[ForceInt] = Field(description="Number of analysts who estimated EPS.", default=None)


"""Analyst Search Standard Model."""

from datetime import datetime  # noqa: E402

from openbb_core.provider.abstract.data import Data  # noqa: E402, F811
from openbb_core.provider.abstract.query_params import QueryParams  # noqa: E402, F811
from pydantic import Field  # noqa: E402, F811


class AnalystSearchQueryParams(QueryParams):
    """Analyst Search Query."""

    # Omitting either filter returns the full universe for that dimension.
    analyst_name: Optional[str] = Field(
        description="Analyst names to return."
        + " Omitting will return all available analysts.",
        default=None,
    )
    firm_name: Optional[str] = Field(
        description="Firm names to return."
        + " Omitting will return all available firms.",
        default=None,
    )


class AnalystSearchData(Data):
    """Analyst Search data."""

    last_updated: Optional[datetime] = Field(description="Date of the last update.", default=None)
    firm_name: Optional[str] = Field(description="Firm name of the analyst.", default=None)
    name_first: Optional[str] = Field(description="Analyst first name.", default=None)
    name_last: Optional[str] = Field(description="Analyst last name.", default=None)
    # The only required field: every record carries the analyst's full name.
    name_full: str = Field(description="Analyst full name.")
"""Available Indicators Standard Model."""

from typing import Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS
from pydantic import Field


class AvailableIndicatorsQueryParams(QueryParams):
    """Available Indicators Query."""

    # Intentionally empty: providers extend this with their own filters.


# Fix: this class was previously exported as `AvailableIndicesQueryParams`,
# which contradicted its own docstring and collided with the class of the
# same name in `available_indices.py`. The alias keeps old imports working.
AvailableIndicesQueryParams = AvailableIndicatorsQueryParams


class AvailableIndicatorsData(Data):
    """Available Indicators Data.

    Returns the list of available economic indicators from a provider.
    """

    symbol_root: Optional[str] = Field(
        default=None, description="The root symbol representing the indicator."
    )
    symbol: Optional[str] = Field(
        default=None,
        description=DATA_DESCRIPTIONS.get("symbol", "")
        + " The root symbol with additional codes.",
    )
    country: Optional[str] = Field(
        default=None,
        description="The name of the country, region, or entity represented by the symbol.",
    )
    iso: Optional[str] = Field(
        default=None,
        description="The ISO code of the country, region, or entity represented by the symbol.",
    )
    description: Optional[str] = Field(
        default=None, description="The description of the indicator."
    )
    frequency: Optional[str] = Field(
        default=None, description="The frequency of the indicator data."
    )
"""Available Indices Standard Model."""

from typing import Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from pydantic import Field


class AvailableIndicesQueryParams(QueryParams):
    """Available Indices Query."""

    # Intentionally empty: providers extend this with their own filters.


class AvailableIndicesData(Data):
    """Available Indices Data.

    Returns the list of available indices from a provider.
    """

    name: Optional[str] = Field(description="Name of the index.", default=None)
    currency: Optional[str] = Field(
        description="Currency the index is traded in.", default=None
    )
"""Balance of Payments Model."""

from datetime import (
    date as dateType,
)
from typing import Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from pydantic import Field


class BalanceOfPaymentsQueryParams(QueryParams):
    """Balance Of Payments Query."""

    # Intentionally empty: providers extend this with their own parameters.


class BP6BopUsdData(Data):
    """OECD BP6 Balance of Payments Items, in USD."""

    # Fix: was annotated `dateType` with `default=None`; under Pydantic v2 a
    # required date cannot default to None, so the annotation is now Optional.
    period: Optional[dateType] = Field(
        default=None,
        description="The date representing the beginning of the reporting period.",
    )
    balance_percent_of_gdp: Optional[float] = Field(
        default=None,
        description="Current Account Balance as Percent of GDP",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    balance_total: Optional[float] = Field(
        default=None, description="Current Account Total Balance (USD)"
    )
    balance_total_services: Optional[float] = Field(
        default=None, description="Current Account Total Services Balance (USD)"
    )
    balance_total_secondary_income: Optional[float] = Field(
        default=None, description="Current Account Total Secondary Income Balance (USD)"
    )
    balance_total_goods: Optional[float] = Field(
        default=None, description="Current Account Total Goods Balance (USD)"
    )
    balance_total_primary_income: Optional[float] = Field(
        default=None, description="Current Account Total Primary Income Balance (USD)"
    )
    credits_services_percent_of_goods_and_services: Optional[float] = Field(
        default=None,
        description="Current Account Credits Services as Percent of Goods and Services",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    credits_services_percent_of_current_account: Optional[float] = Field(
        default=None,
        description="Current Account Credits Services as Percent of Current Account",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    credits_total_services: Optional[float] = Field(
        default=None, description="Current Account Credits Total Services (USD)"
    )
    credits_total_goods: Optional[float] = Field(
        default=None, description="Current Account Credits Total Goods (USD)"
    )
    credits_total_primary_income: Optional[float] = Field(
        default=None, description="Current Account Credits Total Primary Income (USD)"
    )
    credits_total_secondary_income: Optional[float] = Field(
        default=None, description="Current Account Credits Total Secondary Income (USD)"
    )
    credits_total: Optional[float] = Field(
        default=None, description="Current Account Credits Total (USD)"
    )
    debits_services_percent_of_goods_and_services: Optional[float] = Field(
        default=None,
        description="Current Account Debits Services as Percent of Goods and Services",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    debits_services_percent_of_current_account: Optional[float] = Field(
        default=None,
        description="Current Account Debits Services as Percent of Current Account",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    debits_total_services: Optional[float] = Field(
        default=None, description="Current Account Debits Total Services (USD)"
    )
    debits_total_goods: Optional[float] = Field(
        default=None, description="Current Account Debits Total Goods (USD)"
    )
    debits_total_primary_income: Optional[float] = Field(
        default=None, description="Current Account Debits Total Primary Income (USD)"
    )
    debits_total: Optional[float] = Field(
        default=None, description="Current Account Debits Total (USD)"
    )
    debits_total_secondary_income: Optional[float] = Field(
        default=None, description="Current Account Debits Total Secondary Income (USD)"
    )


class ECBMain(Data):
    """ECB Main Balance of Payments Items."""

    # Fix: same Optional annotation correction as BP6BopUsdData.period.
    period: Optional[dateType] = Field(
        default=None,
        description="The date representing the beginning of the reporting period.",
    )
    current_account: Optional[float] = Field(
        default=None, description="Current Account Balance (Billions of EUR)"
    )
    goods: Optional[float] = Field(
        default=None, description="Goods Balance (Billions of EUR)"
    )
    services: Optional[float] = Field(
        default=None, description="Services Balance (Billions of EUR)"
    )
    primary_income: Optional[float] = Field(
        default=None, description="Primary Income Balance (Billions of EUR)"
    )
    secondary_income: Optional[float] = Field(
        default=None, description="Secondary Income Balance (Billions of EUR)"
    )
    capital_account: Optional[float] = Field(
        default=None, description="Capital Account Balance (Billions of EUR)"
    )
    net_lending_to_rest_of_world: Optional[float] = Field(
        default=None,
        description="Balance of net lending to the rest of the world (Billions of EUR)",
    )
    financial_account: Optional[float] = Field(
        default=None, description="Financial Account Balance (Billions of EUR)"
    )
    direct_investment: Optional[float] = Field(
        default=None, description="Direct Investment Balance (Billions of EUR)"
    )
    portfolio_investment: Optional[float] = Field(
        default=None, description="Portfolio Investment Balance (Billions of EUR)"
    )
    financial_derivatives: Optional[float] = Field(
        default=None, description="Financial Derivatives Balance (Billions of EUR)"
    )
    other_investment: Optional[float] = Field(
        default=None, description="Other Investment Balance (Billions of EUR)"
    )
    reserve_assets: Optional[float] = Field(
        default=None, description="Reserve Assets Balance (Billions of EUR)"
    )
    # NOTE(review): field name keeps the original spelling ("ommissions");
    # renaming it would break provider field mapping and serialized output.
    errors_and_ommissions: Optional[float] = Field(
        default=None, description="Errors and Omissions (Billions of EUR)"
    )
class ECBSummary(Data):
    """ECB Summary Balance of Payments Items."""

    # Fix: was annotated `dateType` with `default=None`; under Pydantic v2 a
    # required date cannot default to None, so the annotation is now Optional.
    period: Optional[dateType] = Field(
        default=None,
        description="The date representing the beginning of the reporting period.",
    )
    current_account_credit: Optional[float] = Field(
        default=None, description="Current Account Credit (Billions of EUR)"
    )
    current_account_debit: Optional[float] = Field(
        default=None, description="Current Account Debit (Billions of EUR)"
    )
    current_account_balance: Optional[float] = Field(
        default=None, description="Current Account Balance (Billions of EUR)"
    )
    goods_credit: Optional[float] = Field(
        default=None, description="Goods Credit (Billions of EUR)"
    )
    goods_debit: Optional[float] = Field(
        default=None, description="Goods Debit (Billions of EUR)"
    )
    services_credit: Optional[float] = Field(
        default=None, description="Services Credit (Billions of EUR)"
    )
    services_debit: Optional[float] = Field(
        default=None, description="Services Debit (Billions of EUR)"
    )
    primary_income_credit: Optional[float] = Field(
        default=None, description="Primary Income Credit (Billions of EUR)"
    )
    primary_income_employee_compensation_credit: Optional[float] = Field(
        default=None,
        description="Primary Income Employee Compensation Credit (Billions of EUR)",
    )
    primary_income_debit: Optional[float] = Field(
        default=None, description="Primary Income Debit (Billions of EUR)"
    )
    primary_income_employee_compensation_debit: Optional[float] = Field(
        default=None,
        description="Primary Income Employee Compensation Debit (Billions of EUR)",
    )
    secondary_income_credit: Optional[float] = Field(
        default=None, description="Secondary Income Credit (Billions of EUR)"
    )
    secondary_income_debit: Optional[float] = Field(
        default=None, description="Secondary Income Debit (Billions of EUR)"
    )
    capital_account_credit: Optional[float] = Field(
        default=None, description="Capital Account Credit (Billions of EUR)"
    )
    capital_account_debit: Optional[float] = Field(
        default=None, description="Capital Account Debit (Billions of EUR)"
    )


class ECBServices(Data):
    """ECB Services Balance of Payments Items."""

    # Fix: same Optional annotation correction as above.
    period: Optional[dateType] = Field(
        default=None,
        description="The date representing the beginning of the reporting period.",
    )
    services_total_credit: Optional[float] = Field(
        default=None, description="Services Total Credit (Billions of EUR)"
    )
    services_total_debit: Optional[float] = Field(
        default=None, description="Services Total Debit (Billions of EUR)"
    )
    transport_credit: Optional[float] = Field(
        default=None, description="Transport Credit (Billions of EUR)"
    )
    transport_debit: Optional[float] = Field(
        default=None, description="Transport Debit (Billions of EUR)"
    )
    travel_credit: Optional[float] = Field(
        default=None, description="Travel Credit (Billions of EUR)"
    )
    travel_debit: Optional[float] = Field(
        default=None, description="Travel Debit (Billions of EUR)"
    )
    financial_services_credit: Optional[float] = Field(
        default=None, description="Financial Services Credit (Billions of EUR)"
    )
    financial_services_debit: Optional[float] = Field(
        default=None, description="Financial Services Debit (Billions of EUR)"
    )
    communications_credit: Optional[float] = Field(
        default=None, description="Communications Credit (Billions of EUR)"
    )
    communications_debit: Optional[float] = Field(
        default=None, description="Communications Debit (Billions of EUR)"
    )
    other_business_services_credit: Optional[float] = Field(
        default=None, description="Other Business Services Credit (Billions of EUR)"
    )
    other_business_services_debit: Optional[float] = Field(
        default=None, description="Other Business Services Debit (Billions of EUR)"
    )
    other_services_credit: Optional[float] = Field(
        default=None, description="Other Services Credit (Billions of EUR)"
    )
    other_services_debit: Optional[float] = Field(
        default=None, description="Other Services Debit (Billions of EUR)"
    )
class ECBInvestmentIncome(Data):
    """ECB Investment Income Balance of Payments Items."""

    # Fix: was annotated `dateType` with `default=None`; under Pydantic v2 a
    # required date cannot default to None, so the annotation is now Optional.
    period: Optional[dateType] = Field(
        default=None,
        description="The date representing the beginning of the reporting period.",
    )
    investment_total_credit: Optional[float] = Field(
        default=None, description="Investment Total Credit (Billions of EUR)"
    )
    investment_total_debit: Optional[float] = Field(
        default=None, description="Investment Total Debit (Billions of EUR)"
    )
    equity_credit: Optional[float] = Field(
        default=None, description="Equity Credit (Billions of EUR)"
    )
    equity_reinvested_earnings_credit: Optional[float] = Field(
        default=None, description="Equity Reinvested Earnings Credit (Billions of EUR)"
    )
    equity_debit: Optional[float] = Field(
        default=None, description="Equity Debit (Billions of EUR)"
    )
    equity_reinvested_earnings_debit: Optional[float] = Field(
        default=None, description="Equity Reinvested Earnings Debit (Billions of EUR)"
    )
    debt_instruments_credit: Optional[float] = Field(
        default=None, description="Debt Instruments Credit (Billions of EUR)"
    )
    debt_instruments_debit: Optional[float] = Field(
        default=None, description="Debt Instruments Debit (Billions of EUR)"
    )
    portfolio_investment_equity_credit: Optional[float] = Field(
        default=None, description="Portfolio Investment Equity Credit (Billions of EUR)"
    )
    portfolio_investment_equity_debit: Optional[float] = Field(
        default=None, description="Portfolio Investment Equity Debit (Billions of EUR)"
    )
    portfolio_investment_debt_instruments_credit: Optional[float] = Field(
        default=None,
        description="Portfolio Investment Debt Instruments Credit (Billions of EUR)",
    )
    # NOTE(review): "portofolio" spelling kept as-is; renaming the field would
    # break provider field mapping and serialized output.
    portofolio_investment_debt_instruments_debit: Optional[float] = Field(
        default=None,
        description="Portfolio Investment Debt Instruments Debit (Billions of EUR)",
    )
    other_investment_credit: Optional[float] = Field(
        default=None, description="Other Investment Credit (Billions of EUR)"
    )
    other_investment_debit: Optional[float] = Field(
        default=None, description="Other Investment Debit (Billions of EUR)"
    )
    reserve_assets_credit: Optional[float] = Field(
        default=None, description="Reserve Assets Credit (Billions of EUR)"
    )


class ECBDirectInvestment(Data):
    """ECB Direct Investment Balance of Payments Items."""

    # Fix: same Optional annotation correction as above.
    period: Optional[dateType] = Field(
        default=None,
        description="The date representing the beginning of the reporting period.",
    )
    assets_total: Optional[float] = Field(
        default=None, description="Assets Total (Billions of EUR)"
    )
    assets_equity: Optional[float] = Field(
        default=None, description="Assets Equity (Billions of EUR)"
    )
    assets_debt_instruments: Optional[float] = Field(
        default=None, description="Assets Debt Instruments (Billions of EUR)"
    )
    assets_mfi: Optional[float] = Field(
        default=None, description="Assets MFIs (Billions of EUR)"
    )
    assets_non_mfi: Optional[float] = Field(
        default=None, description="Assets Non MFIs (Billions of EUR)"
    )
    assets_direct_investment_abroad: Optional[float] = Field(
        default=None, description="Assets Direct Investment Abroad (Billions of EUR)"
    )
    liabilities_total: Optional[float] = Field(
        default=None, description="Liabilities Total (Billions of EUR)"
    )
    liabilities_equity: Optional[float] = Field(
        default=None, description="Liabilities Equity (Billions of EUR)"
    )
    liabilities_debt_instruments: Optional[float] = Field(
        default=None, description="Liabilities Debt Instruments (Billions of EUR)"
    )
    liabilities_mfi: Optional[float] = Field(
        default=None, description="Liabilities MFIs (Billions of EUR)"
    )
    liabilities_non_mfi: Optional[float] = Field(
        default=None, description="Liabilities Non MFIs (Billions of EUR)"
    )
    liabilities_direct_investment_euro_area: Optional[float] = Field(
        default=None,
        description="Liabilities Direct Investment in Euro Area (Billions of EUR)",
    )
class ECBPortfolioInvestment(Data):
    """ECB Portfolio Investment Balance of Payments Items."""

    # Fix: was annotated `dateType` with `default=None`; under Pydantic v2 a
    # required date cannot default to None, so the annotation is now Optional.
    period: Optional[dateType] = Field(
        default=None,
        description="The date representing the beginning of the reporting period.",
    )
    assets_total: Optional[float] = Field(
        default=None, description="Assets Total (Billions of EUR)"
    )
    assets_equity_and_fund_shares: Optional[float] = Field(
        default=None,
        description="Assets Equity and Investment Fund Shares (Billions of EUR)",
    )
    assets_equity_shares: Optional[float] = Field(
        default=None, description="Assets Equity Shares (Billions of EUR)"
    )
    assets_investment_fund_shares: Optional[float] = Field(
        default=None, description="Assets Investment Fund Shares (Billions of EUR)"
    )
    assets_debt_short_term: Optional[float] = Field(
        default=None, description="Assets Debt Short Term (Billions of EUR)"
    )
    assets_debt_long_term: Optional[float] = Field(
        default=None, description="Assets Debt Long Term (Billions of EUR)"
    )
    assets_resident_sector_eurosystem: Optional[float] = Field(
        default=None, description="Assets Resident Sector Eurosystem (Billions of EUR)"
    )
    assets_resident_sector_mfi_ex_eurosystem: Optional[float] = Field(
        default=None,
        description="Assets Resident Sector MFIs outside Eurosystem (Billions of EUR)",
    )
    assets_resident_sector_government: Optional[float] = Field(
        default=None, description="Assets Resident Sector Government (Billions of EUR)"
    )
    assets_resident_sector_other: Optional[float] = Field(
        default=None, description="Assets Resident Sector Other (Billions of EUR)"
    )
    liabilities_total: Optional[float] = Field(
        default=None, description="Liabilities Total (Billions of EUR)"
    )
    liabilities_equity_and_fund_shares: Optional[float] = Field(
        default=None,
        description="Liabilities Equity and Investment Fund Shares (Billions of EUR)",
    )
    liabilities_equity: Optional[float] = Field(
        default=None, description="Liabilities Equity (Billions of EUR)"
    )
    liabilities_investment_fund_shares: Optional[float] = Field(
        default=None, description="Liabilities Investment Fund Shares (Billions of EUR)"
    )
    liabilities_debt_short_term: Optional[float] = Field(
        default=None, description="Liabilities Debt Short Term (Billions of EUR)"
    )
    liabilities_debt_long_term: Optional[float] = Field(
        default=None, description="Liabilities Debt Long Term (Billions of EUR)"
    )
    liabilities_resident_sector_government: Optional[float] = Field(
        default=None,
        description="Liabilities Resident Sector Government (Billions of EUR)",
    )
    liabilities_resident_sector_other: Optional[float] = Field(
        default=None, description="Liabilities Resident Sector Other (Billions of EUR)"
    )


class ECBOtherInvestment(Data):
    """ECB Other Investment Balance of Payments Items."""

    # Fix: same Optional annotation correction as above.
    period: Optional[dateType] = Field(
        default=None,
        description="The date representing the beginning of the reporting period.",
    )
    assets_total: Optional[float] = Field(
        default=None, description="Assets Total (Billions of EUR)"
    )
    assets_currency_and_deposits: Optional[float] = Field(
        default=None, description="Assets Currency and Deposits (Billions of EUR)"
    )
    assets_loans: Optional[float] = Field(
        default=None, description="Assets Loans (Billions of EUR)"
    )
    assets_trade_credit_and_advances: Optional[float] = Field(
        default=None, description="Assets Trade Credits and Advances (Billions of EUR)"
    )
    assets_eurosystem: Optional[float] = Field(
        default=None, description="Assets Eurosystem (Billions of EUR)"
    )
    assets_other_mfi_ex_eurosystem: Optional[float] = Field(
        default=None,
        description="Assets Other MFIs outside Eurosystem (Billions of EUR)",
    )
    assets_government: Optional[float] = Field(
        default=None, description="Assets Government (Billions of EUR)"
    )
    assets_other_sectors: Optional[float] = Field(
        default=None, description="Assets Other Sectors (Billions of EUR)"
    )
    liabilities_total: Optional[float] = Field(
        default=None, description="Liabilities Total (Billions of EUR)"
    )
    liabilities_currency_and_deposits: Optional[float] = Field(
        default=None, description="Liabilities Currency and Deposits (Billions of EUR)"
    )
    liabilities_loans: Optional[float] = Field(
        default=None, description="Liabilities Loans (Billions of EUR)"
    )
    liabilities_trade_credit_and_advances: Optional[float] = Field(
        default=None,
        description="Liabilities Trade Credits and Advances (Billions of EUR)",
    )
    liabilities_eurosystem: Optional[float] = Field(
        default=None, description="Liabilities Eurosystem (Billions of EUR)"
    )
    liabilities_other_mfi_ex_eurosystem: Optional[float] = Field(
        default=None,
        description="Liabilities Other MFIs outside Eurosystem (Billions of EUR)",
    )
    liabilities_government: Optional[float] = Field(
        default=None, description="Liabilities Government (Billions of EUR)"
    )
    liabilities_other_sectors: Optional[float] = Field(
        default=None, description="Liabilities Other Sectors (Billions of EUR)"
    )
class ECBCountry(Data):
    """ECB Balance of Payments Items by Country."""

    # Fix: was annotated `dateType` with `default=None`; under Pydantic v2 a
    # required date cannot default to None, so the annotation is now Optional.
    period: Optional[dateType] = Field(
        default=None,
        description="The date representing the beginning of the reporting period.",
    )
    current_account_balance: Optional[float] = Field(
        default=None, description="Current Account Balance (Billions of EUR)"
    )
    current_account_credit: Optional[float] = Field(
        default=None, description="Current Account Credits (Billions of EUR)"
    )
    current_account_debit: Optional[float] = Field(
        default=None, description="Current Account Debits (Billions of EUR)"
    )
    goods_balance: Optional[float] = Field(
        default=None, description="Goods Balance (Billions of EUR)"
    )
    goods_credit: Optional[float] = Field(
        default=None, description="Goods Credits (Billions of EUR)"
    )
    goods_debit: Optional[float] = Field(
        default=None, description="Goods Debits (Billions of EUR)"
    )
    services_balance: Optional[float] = Field(
        default=None, description="Services Balance (Billions of EUR)"
    )
    services_credit: Optional[float] = Field(
        default=None, description="Services Credits (Billions of EUR)"
    )
    services_debit: Optional[float] = Field(
        default=None, description="Services Debits (Billions of EUR)"
    )
    primary_income_balance: Optional[float] = Field(
        default=None, description="Primary Income Balance (Billions of EUR)"
    )
    primary_income_credit: Optional[float] = Field(
        default=None, description="Primary Income Credits (Billions of EUR)"
    )
    primary_income_debit: Optional[float] = Field(
        default=None, description="Primary Income Debits (Billions of EUR)"
    )
    investment_income_balance: Optional[float] = Field(
        default=None, description="Investment Income Balance (Billions of EUR)"
    )
    investment_income_credit: Optional[float] = Field(
        default=None, description="Investment Income Credits (Billions of EUR)"
    )
    investment_income_debit: Optional[float] = Field(
        default=None, description="Investment Income Debits (Billions of EUR)"
    )
    secondary_income_balance: Optional[float] = Field(
        default=None, description="Secondary Income Balance (Billions of EUR)"
    )
    secondary_income_credit: Optional[float] = Field(
        default=None, description="Secondary Income Credits (Billions of EUR)"
    )
    secondary_income_debit: Optional[float] = Field(
        default=None, description="Secondary Income Debits (Billions of EUR)"
    )
    capital_account_balance: Optional[float] = Field(
        default=None, description="Capital Account Balance (Billions of EUR)"
    )
    capital_account_credit: Optional[float] = Field(
        default=None, description="Capital Account Credits (Billions of EUR)"
    )
    capital_account_debit: Optional[float] = Field(
        default=None, description="Capital Account Debits (Billions of EUR)"
    )
"""Balance Sheet Standard Model."""

from datetime import date as dateType
from typing import Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import (
    QUERY_DESCRIPTIONS,
)
from pydantic import Field, NonNegativeInt, field_validator


class BalanceSheetQueryParams(QueryParams):
    """Balance Sheet Query."""

    symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", ""))
    # Providers return at most this many reporting periods.
    limit: Optional[NonNegativeInt] = Field(
        description=QUERY_DESCRIPTIONS.get("limit", ""), default=5
    )

    @field_validator("symbol", mode="before", check_fields=False)
    @classmethod
    def to_upper(cls, v: str):
        """Convert field to uppercase."""
        return v.upper()


class BalanceSheetData(Data):
    """Balance Sheet Data."""

    period_ending: dateType = Field(description="The end date of the reporting period.")
    fiscal_period: Optional[str] = Field(
        default=None, description="The fiscal period of the report."
    )
    fiscal_year: Optional[int] = Field(
        default=None, description="The fiscal year of the fiscal period."
    )
"""Balance Sheet Statement Growth Standard Model."""

from datetime import date as dateType
from typing import Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import QUERY_DESCRIPTIONS
from pydantic import Field, field_validator


class BalanceSheetGrowthQueryParams(QueryParams):
    """Query parameters for balance sheet statement growth."""

    symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", ""))
    limit: Optional[int] = Field(
        description=QUERY_DESCRIPTIONS.get("limit", ""), default=10
    )

    @field_validator("symbol", mode="before", check_fields=False)
    @classmethod
    def to_upper(cls, v: str):
        """Normalize the ticker symbol to uppercase."""
        return v.upper()


class BalanceSheetGrowthData(Data):
    """Period-over-period growth data for balance sheet statements."""

    period_ending: dateType = Field(description="The end date of the reporting period.")
    fiscal_period: Optional[str] = Field(
        default=None, description="The fiscal period of the report."
    )
    fiscal_year: Optional[int] = Field(
        default=None, description="The fiscal year of the fiscal period."
    )
"""BLS Series Standard Model."""

from datetime import date as dateType
from typing import Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import (
    DATA_DESCRIPTIONS,
    QUERY_DESCRIPTIONS,
)
from pydantic import Field, field_validator


class SeriesQueryParams(QueryParams):
    """Query parameters for a BLS time series."""

    symbol: str = Field(
        description=QUERY_DESCRIPTIONS.get("symbol", ""),
    )
    start_date: Optional[dateType] = Field(
        default=None, description=QUERY_DESCRIPTIONS.get("start_date", "")
    )
    end_date: Optional[dateType] = Field(
        default=None, description=QUERY_DESCRIPTIONS.get("end_date", "")
    )

    @field_validator("symbol", mode="before", check_fields=False)
    @classmethod
    def to_upper(cls, v: str) -> str:
        """Normalize the series symbol to uppercase."""
        return v.upper()


class SeriesData(Data):
    """A single observation of a BLS time series."""

    date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", ""))
    symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", ""))
    title: Optional[str] = Field(default=None, description="Title of the series.")
    value: Optional[float] = Field(
        description="Observation value for the symbol and date.", default=None
    )
"""Bond Indices Standard Model."""

from datetime import (
    date as dateType,
)
from typing import Literal, Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import (
    DATA_DESCRIPTIONS,
    QUERY_DESCRIPTIONS,
)
from pydantic import Field, field_validator


class BondIndicesQueryParams(QueryParams):
    """Query parameters for bond index series."""

    start_date: Optional[dateType] = Field(
        description=QUERY_DESCRIPTIONS.get("start_date", ""),
        default=None,
    )
    end_date: Optional[dateType] = Field(
        description=QUERY_DESCRIPTIONS.get("end_date", ""),
        default=None,
    )
    index_type: Literal["yield", "yield_to_worst", "total_return", "oas"] = Field(
        default="yield",
        description="The type of series. OAS is the option-adjusted spread. Default is yield.",
        json_schema_extra={
            "choices": ["yield", "yield_to_worst", "total_return", "oas"]
        },
    )

    @field_validator("index_type", mode="before", check_fields=False)
    @classmethod
    def to_lower(cls, v: Optional[str]) -> Optional[str]:
        """Normalize the series type to lowercase."""
        if not v:
            return v
        return v.lower()


class BondIndicesData(Data):
    """A single observation of a bond index series."""

    date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", ""))
    symbol: Optional[str] = Field(
        description=DATA_DESCRIPTIONS.get("symbol", ""),
        default=None,
    )
    value: float = Field(description="Index values.")
"""Bond Prices Standard Model."""

from datetime import (
    date as dateType,
)
from typing import List, Optional, Union

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from pydantic import Field


class BondPricesQueryParams(QueryParams):
    """Bond Prices Query.

    All filters are optional; providers combine whichever are supplied.
    """

    country: Optional[str] = Field(
        default=None,
        description="The country to get data. Matches partial name.",
    )
    issuer_name: Optional[str] = Field(
        default=None,
        description="Name of the issuer. Returns partial matches and is case insensitive.",
    )
    isin: Optional[Union[List, str]] = Field(
        default=None,
        description="International Securities Identification Number(s) of the bond(s).",
    )
    lei: Optional[str] = Field(
        default=None,
        description="Legal Entity Identifier of the issuing entity.",
    )
    currency: Optional[Union[List, str]] = Field(
        default=None,
        description="Currency of the bond. Formatted as the 3-letter ISO 4217 code (e.g. GBP, EUR, USD).",
    )
    coupon_min: Optional[float] = Field(
        default=None,
        description="Minimum coupon rate of the bond.",
    )
    coupon_max: Optional[float] = Field(
        default=None,
        description="Maximum coupon rate of the bond.",
    )
    issued_amount_min: Optional[int] = Field(
        default=None,
        description="Minimum issued amount of the bond.",
    )
    # Fix: was Optional[str] — clearly a typo, since issued_amount_min is
    # Optional[int]. Numeric strings still coerce under pydantic lax mode.
    issued_amount_max: Optional[int] = Field(
        default=None,
        description="Maximum issued amount of the bond.",
    )
    maturity_date_min: Optional[dateType] = Field(
        default=None,
        description="Minimum maturity date of the bond.",
    )
    maturity_date_max: Optional[dateType] = Field(
        default=None,
        description="Maximum maturity date of the bond.",
    )
    # NOTE: ytm_max is declared before ytm_min in the upstream model; the
    # order is preserved because it affects schema/field ordering.
    ytm_max: Optional[float] = Field(
        default=None,
        description="Maximum yield to maturity of the bond.",
    )
    ytm_min: Optional[float] = Field(
        default=None,
        description="Minimum yield to maturity of the bond.",
    )
class BondPricesData(Data):
    """Reference and pricing data for a single bond."""

    isin: Optional[str] = Field(
        description="International Securities Identification Number of the bond.",
        default=None,
    )
    lei: Optional[str] = Field(
        description="Legal Entity Identifier of the issuing entity.",
        default=None,
    )
    figi: Optional[str] = Field(default=None, description="FIGI of the bond.")
    cusip: Optional[str] = Field(
        description="CUSIP of the bond.",
        default=None,
    )
    # Percent-valued fields carry frontend hints so UIs render e.g. 0.05 as 5%.
    coupon_rate: Optional[float] = Field(
        default=None,
        description="Coupon rate of the bond.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    price: Optional[float] = Field(
        default=None,
        description="Price of the bond.",
        json_schema_extra={"x-unit_measurement": "currency"},
    )
    current_yield: Optional[float] = Field(
        default=None,
        description="Current yield of the bond.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    ytm: Optional[float] = Field(
        default=None,
        description="Yield to maturity of the bond.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    ytw: Optional[float] = Field(
        default=None,
        description="Yield to worst of the bond.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    duration: Optional[float] = Field(
        description="Duration of the bond.",
        default=None,
    )
    maturity_date: Optional[dateType] = Field(
        description="Maturity date of the bond.",
        default=None,
    )
    call_date: Optional[dateType] = Field(
        description="The nearest call date of the bond.",
        default=None,
    )
"""Bond Reference Standard Model."""

from datetime import (
    date as dateType,
)
from typing import List, Optional, Union

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from pydantic import Field, field_validator


class BondReferenceQueryParams(QueryParams):
    """Bond Reference Query.

    All filters are optional; providers combine whichever are supplied.
    """

    country: Optional[str] = Field(
        default=None,
        description="The country to get data. Matches partial name.",
    )
    issuer_name: Optional[str] = Field(
        default=None,
        description="Name of the issuer. Returns partial matches and is case insensitive.",
    )
    isin: Optional[Union[List, str]] = Field(
        default=None,
        description="International Securities Identification Number(s) of the bond(s).",
    )
    lei: Optional[str] = Field(
        default=None,
        description="Legal Entity Identifier of the issuing entity.",
    )
    currency: Optional[Union[List, str]] = Field(
        default=None,
        description="Currency of the bond. Formatted as the 3-letter ISO 4217 code (e.g. GBP, EUR, USD).",
    )
    coupon_min: Optional[float] = Field(
        default=None,
        description="Minimum coupon rate of the bond.",
    )
    coupon_max: Optional[float] = Field(
        default=None,
        description="Maximum coupon rate of the bond.",
    )
    issued_amount_min: Optional[int] = Field(
        default=None,
        description="Minimum issued amount of the bond.",
    )
    # Fix: was Optional[str] — a typo mirrored from bond_prices;
    # issued_amount_min is Optional[int], so the max bound should be too.
    issued_amount_max: Optional[int] = Field(
        default=None,
        description="Maximum issued amount of the bond.",
    )
    maturity_date_min: Optional[dateType] = Field(
        default=None,
        description="Minimum maturity date of the bond.",
    )
    maturity_date_max: Optional[dateType] = Field(
        default=None,
        description="Maximum maturity date of the bond.",
    )

    @field_validator("isin", "currency", "lei", mode="before", check_fields=False)
    @classmethod
    def validate_upper_case(cls, v):
        """Convert the field to uppercase and convert a list to a query string."""
        if isinstance(v, str):
            return v.upper()
        return ",".join([symbol.upper() for symbol in list(v)]) if v else None


class BondReferenceData(Data):
    """Bond Reference Search Data."""

    isin: Optional[str] = Field(
        default=None,
        description="International Securities Identification Number of the bond.",
    )
    lei: Optional[str] = Field(
        default=None,
        description="Legal Entity Identifier of the issuing entity.",
    )
    figi: Optional[str] = Field(default=None, description="FIGI of the bond.")
    cusip: Optional[str] = Field(
        default=None,
        description="CUSIP of the bond.",
    )
    coupon_rate: Optional[float] = Field(
        default=None,
        description="Coupon rate of the bond.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
"""Bond Trades Standard Model."""

from datetime import (
    date as dateType,
    datetime,
)
from typing import Literal, Optional, Union

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import (
    DATA_DESCRIPTIONS,
    QUERY_DESCRIPTIONS,
)
from pydantic import Field, field_validator


class BondTradesQueryParams(QueryParams):
    """Query parameters for bond trade records."""

    country: Optional[str] = Field(
        default=None,
        description="The country to get data. Matches partial name.",
    )
    isin: Optional[str] = Field(
        default=None,
        description="ISIN of the bond.",
    )
    issuer_type: Optional[Literal["government", "corporate", "municipal"]] = Field(
        default=None,
        description="Type of bond issuer.",
    )
    notional_currency: Optional[str] = Field(
        default=None,
        description="""
        Currency of the bond, which might differ from the currency of the trade.
        Formatted as the 3-letter ISO 4217 code (e.g. GBP, EUR, USD).
        """,
    )
    start_date: Optional[Union[dateType, str]] = Field(
        default=None,
        description=(
            QUERY_DESCRIPTIONS.get("start_date", "")
            + " YYYY-MM-DD or ISO-8601 format. E.g. 2023-01-14T10:55:00Z"
        ),
    )
    end_date: Optional[Union[dateType, str]] = Field(
        default=None,
        description=(
            QUERY_DESCRIPTIONS.get("end_date", "")
            + " YYYY-MM-DD or ISO-8601 format. E.g. 2023-01-14T10:55:00Z"
        ),
    )

    @field_validator("isin", "notional_currency", mode="before", check_fields=False)
    @classmethod
    def validate_upper_case(cls, v):
        """Enforce upper case for fields."""
        if not v:
            return None
        return v.upper()


class BondTradesData(Data):
    """A single bond trade record."""

    trade_date: Optional[Union[dateType, datetime]] = Field(
        description="Date of the transaction.",
        default=None,
    )
    isin: Optional[str] = Field(
        description="ISIN of the bond.",
        default=None,
    )
    figi: Optional[str] = Field(default=None, description="FIGI of the bond.")
    cusip: Optional[str] = Field(
        description="CUSIP of the bond.",
        default=None,
    )
    price: Optional[float] = Field(
        default=None,
        description="Price of the bond.",
        json_schema_extra={"x-unit_measurement": "currency"},
    )
    current_yield: Optional[float] = Field(
        default=None,
        description="Current yield of the bond.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    coupon_rate: Optional[float] = Field(
        default=None,
        description="Coupon rate of the bond.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    volume: Optional[int] = Field(
        description=DATA_DESCRIPTIONS.get("volume", ""),
        default=None,
    )
"""Dividend Calendar Standard Model."""

from datetime import date as dateType
from typing import Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import (
    DATA_DESCRIPTIONS,
    QUERY_DESCRIPTIONS,
)
from pydantic import Field


class CalendarDividendQueryParams(QueryParams):
    """Query parameters for the dividend calendar (optional date window)."""

    start_date: Optional[dateType] = Field(
        description=QUERY_DESCRIPTIONS.get("start_date", ""), default=None
    )
    end_date: Optional[dateType] = Field(
        description=QUERY_DESCRIPTIONS.get("end_date", ""), default=None
    )


class CalendarDividendData(Data):
    """A single dividend calendar entry keyed by the ex-dividend date."""

    ex_dividend_date: dateType = Field(
        description="The ex-dividend date - the date on which the stock begins trading without rights to the dividend."
    )
    symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", ""))
    amount: Optional[float] = Field(
        description="The dividend amount per share.", default=None
    )
    name: Optional[str] = Field(default=None, description="Name of the entity.")
    record_date: Optional[dateType] = Field(
        description="The record date of ownership for eligibility.",
        default=None,
    )
    payment_date: Optional[dateType] = Field(
        description="The payment date of the dividend.",
        default=None,
    )
    declaration_date: Optional[dateType] = Field(
        description="Declaration date of the dividend.",
        default=None,
    )
"""Earnings Calendar Standard Model."""

from datetime import date as dateType
from typing import Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import (
    DATA_DESCRIPTIONS,
    QUERY_DESCRIPTIONS,
)
from pydantic import Field


class CalendarEarningsQueryParams(QueryParams):
    """Earnings Calendar Query.

    Optional date window; providers default to a sensible range when empty.
    """

    start_date: Optional[dateType] = Field(
        default=None, description=QUERY_DESCRIPTIONS.get("start_date", "")
    )
    end_date: Optional[dateType] = Field(
        default=None, description=QUERY_DESCRIPTIONS.get("end_date", "")
    )


class CalendarEarningsData(Data):
    """Earnings Calendar Data."""

    report_date: dateType = Field(description="The date of the earnings report.")
    symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", ""))
    name: Optional[str] = Field(description="Name of the entity.", default=None)
    eps_previous: Optional[float] = Field(
        default=None,
        description="The earnings-per-share from the same previously reported period.",
    )
    eps_consensus: Optional[float] = Field(
        default=None,
        # Fix: description typo "conesus" -> "consensus".
        description="The analyst consensus earnings-per-share estimate.",
    )
"""IPO Calendar Standard Model."""

from datetime import date as dateType
from typing import Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import (
    DATA_DESCRIPTIONS,
    QUERY_DESCRIPTIONS,
)
from pydantic import Field


class CalendarIpoQueryParams(QueryParams):
    """Query parameters for the IPO calendar."""

    symbol: Optional[str] = Field(
        default=None, description=QUERY_DESCRIPTIONS.get("symbol", "")
    )
    start_date: Optional[dateType] = Field(
        default=None, description=QUERY_DESCRIPTIONS.get("start_date", "")
    )
    end_date: Optional[dateType] = Field(
        default=None, description=QUERY_DESCRIPTIONS.get("end_date", "")
    )
    limit: Optional[int] = Field(
        default=100, description=QUERY_DESCRIPTIONS.get("limit", "")
    )


class CalendarIpoData(Data):
    """A single IPO calendar entry."""

    symbol: Optional[str] = Field(
        description=DATA_DESCRIPTIONS.get("symbol", ""),
        default=None,
    )
    ipo_date: Optional[dateType] = Field(
        default=None,
        description="The date of the IPO, when the stock first trades on a major exchange.",
    )
"""Calendar Splits Standard Model."""

from datetime import date as dateType
from typing import Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import (
    DATA_DESCRIPTIONS,
    QUERY_DESCRIPTIONS,
)
from pydantic import Field


class CalendarSplitsQueryParams(QueryParams):
    """Query parameters for the stock splits calendar."""

    start_date: Optional[dateType] = Field(
        default=None, description=QUERY_DESCRIPTIONS.get("start_date", "")
    )
    end_date: Optional[dateType] = Field(
        default=None, description=QUERY_DESCRIPTIONS.get("end_date", "")
    )


class CalendarSplitsData(Data):
    """A single stock split event; the ratio is numerator:denominator."""

    date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", ""))
    label: str = Field(description="Label of the stock splits.")
    symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", ""))
    numerator: float = Field(description="Numerator of the stock splits.")
    denominator: float = Field(description="Denominator of the stock splits.")
"""Cash Flow Statement Standard Model."""

from datetime import date as dateType
from typing import Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import QUERY_DESCRIPTIONS
from pydantic import Field, NonNegativeInt, field_validator


class CashFlowStatementQueryParams(QueryParams):
    """Cash Flow Statement Query.

    Standard query parameters shared by cash flow statement providers:
    a required ticker symbol and an optional, non-negative result limit.
    """

    symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", ""))
    limit: Optional[NonNegativeInt] = Field(
        default=5, description=QUERY_DESCRIPTIONS.get("limit", "")
    )

    @field_validator("symbol", mode="before", check_fields=False)
    @classmethod
    def to_upper(cls, v: str) -> str:
        """Convert field to uppercase."""
        # Return annotation added for consistency with sibling standard models.
        return v.upper()


class CashFlowStatementData(Data):
    """Cash Flow Statement Data.

    Common reporting-period fields; provider models extend this with
    the actual cash flow line items.
    """

    period_ending: dateType = Field(description="The end date of the reporting period.")
    fiscal_period: Optional[str] = Field(
        description="The fiscal period of the report.", default=None
    )
    fiscal_year: Optional[int] = Field(
        description="The fiscal year of the fiscal period.", default=None
    )
"""Central Bank Holdings Standard Model."""

from datetime import (
    date as dateType,
)
from typing import Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import (
    DATA_DESCRIPTIONS,
    QUERY_DESCRIPTIONS,
)
from pydantic import Field


class CentralBankHoldingsQueryParams(QueryParams):
    """Query parameters for central bank holdings (optional as-of date)."""

    date: Optional[dateType] = Field(
        description=QUERY_DESCRIPTIONS.get("date", ""),
        default=None,
    )


class CentralBankHoldingsData(Data):
    """Base record for central bank holdings; providers add the positions."""

    date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", ""))
"""Cik Map Standard Model."""

from typing import Optional, Union

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import (
    DATA_DESCRIPTIONS,
    QUERY_DESCRIPTIONS,
)
from pydantic import Field, field_validator


class CikMapQueryParams(QueryParams):
    """Query parameters for mapping a ticker symbol to a SEC CIK number."""

    symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", ""))

    @field_validator("symbol", mode="before", check_fields=False)
    @classmethod
    def to_upper(cls, v: str) -> str:
        """Normalize the ticker symbol to uppercase."""
        return v.upper()


class CikMapData(Data):
    """The CIK mapped to the requested symbol."""

    cik: Optional[Union[str, int]] = Field(
        description=DATA_DESCRIPTIONS.get("cik", ""), default=None
    )
"""Commodity Spot Prices Standard Model."""

from datetime import (
    date as dateType,
)
from typing import Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import (
    DATA_DESCRIPTIONS,
    QUERY_DESCRIPTIONS,
)
from pydantic import Field


class CommoditySpotPricesQueryParams(QueryParams):
    """Query parameters for commodity spot prices (optional date window)."""

    start_date: Optional[dateType] = Field(
        description=QUERY_DESCRIPTIONS.get("start_date", ""),
        default=None,
    )
    end_date: Optional[dateType] = Field(
        description=QUERY_DESCRIPTIONS.get("end_date", ""),
        default=None,
    )


class CommoditySpotPricesData(Data):
    """A single commodity spot price observation."""

    date: dateType = Field(
        description=DATA_DESCRIPTIONS.get("date", ""),
    )
    symbol: Optional[str] = Field(
        description=DATA_DESCRIPTIONS.get("symbol", ""),
        default=None,
    )
    commodity: Optional[str] = Field(
        description="Commodity name.",
        default=None,
    )
    price: float = Field(
        description="Price of the commodity.",
        json_schema_extra={"x-unit_measurement": "currency"},
    )
    unit: Optional[str] = Field(
        description="Unit of the commodity price.",
        default=None,
    )
Optional[str] = Field( + default=None, + description="Unit of the commodity price.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/company_filings.py b/openbb_platform/core/openbb_core/provider/standard_models/company_filings.py new file mode 100644 index 0000000000000000000000000000000000000000..47b4655fc5c2f7ca3a5e2a50ede5565f1262b569 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/company_filings.py @@ -0,0 +1,44 @@ +"""Company Filings Standard Model.""" + +from datetime import ( + date as dateType, +) +from typing import List, Optional, Set, Union + +from dateutil import parser +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class CompanyFilingsQueryParams(QueryParams): + """Company Filings Query.""" + + symbol: Optional[str] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("symbol", "") + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: Union[str, List[str], Set[str]]): + """Convert field to uppercase.""" + if isinstance(v, str): + return v.upper() + return ",".join([symbol.upper() for symbol in list(v)]) if v else None + + +class CompanyFilingsData(Data): + """Company Filings Data.""" + + filing_date: dateType = Field(description="The date of the filing.") + report_type: Optional[str] = Field(default=None, description="Type of filing.") + report_url: str = Field(description="URL to the actual report.") + + @field_validator("filing_date", "accepted_date", mode="before", check_fields=False) + @classmethod + def convert_date(cls, v: str): + """Convert date to date type.""" + return parser.parse(str(v)).date() if v else None diff --git a/openbb_platform/core/openbb_core/provider/standard_models/company_news.py 
b/openbb_platform/core/openbb_core/provider/standard_models/company_news.py new file mode 100644 index 0000000000000000000000000000000000000000..e36a0d2060232fc4d7d14e740698a7e3cffe5db6 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/company_news.py @@ -0,0 +1,75 @@ +"""Company News Standard Model.""" + +from datetime import ( + date as dateType, + datetime, +) +from typing import Dict, List, Optional + +from dateutil.relativedelta import relativedelta +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, NonNegativeInt, field_validator + + +class CompanyNewsQueryParams(QueryParams): + """Company news Query.""" + + symbol: Optional[str] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("symbol", ""), + ) + start_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("start_date", "") + ) + end_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date", "") + ) + limit: Optional[NonNegativeInt] = Field( + default=2500, description=QUERY_DESCRIPTIONS.get("limit", "") + ) + + @field_validator("symbol", mode="before") + @classmethod + def symbols_validate(cls, v): + """Validate the symbols.""" + return v.upper() if v else None + + @field_validator("start_date", mode="before") + @classmethod + def start_date_validate(cls, v) -> dateType: # pylint: disable=E0213 + """Populate start date if empty.""" + if not v: + now = datetime.now().date() + v = now - relativedelta(weeks=16) + return v + + @field_validator("end_date", mode="before") + @classmethod + def end_date_validate(cls, v) -> dateType: # pylint: disable=E0213 + """Populate end date if empty.""" + if not v: + v = datetime.now().date() + return v + + +class CompanyNewsData(Data): + """Company News Data.""" + + date: 
datetime = Field( + description=DATA_DESCRIPTIONS.get("date", "") + + " Here it is the published date of the article." + ) + title: str = Field(description="Title of the article.") + text: Optional[str] = Field(default=None, description="Text/body of the article.") + images: Optional[List[Dict[str, str]]] = Field( + default=None, description="Images associated with the article." + ) + url: str = Field(description="URL to the article.") + symbols: Optional[str] = Field( + default=None, description="Symbols associated with the article." + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/company_overview.py b/openbb_platform/core/openbb_core/provider/standard_models/company_overview.py new file mode 100644 index 0000000000000000000000000000000000000000..2963e5a378c886d0529e057f0fb80dcd8966010f --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/company_overview.py @@ -0,0 +1,106 @@ +"""Company Overview Standard Model.""" + +from datetime import date +from typing import List, Optional, Set, Union + +from openbb_core.provider.abstract.data import Data, ForceInt +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class CompanyOverviewQueryParams(QueryParams): + """Company Overview Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class CompanyOverviewData(Data): + """Company Overview Data. + + Returns the profile of a given company. 
+ """ + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + price: Optional[float] = Field(default=None, description="Price of the company.") + beta: Optional[float] = Field(default=None, description="Beta of the company.") + vol_avg: Optional[ForceInt] = Field( + default=None, description="Volume average of the company." + ) + mkt_cap: Optional[ForceInt] = Field( + default=None, description="Market capitalization of the company." + ) + last_div: Optional[float] = Field( + default=None, description="Last dividend of the company." + ) + range: Optional[str] = Field(default=None, description="Range of the company.") + changes: Optional[float] = Field( + default=None, description="Changes of the company." + ) + company_name: Optional[str] = Field( + default=None, description="Company name of the company." + ) + currency: Optional[str] = Field( + default=None, description="Currency of the company." + ) + cik: Optional[str] = Field( + default=None, description=DATA_DESCRIPTIONS.get("cik", "") + ) + isin: Optional[str] = Field(default=None, description="ISIN of the company.") + cusip: Optional[str] = Field(default=None, description="CUSIP of the company.") + exchange: Optional[str] = Field( + default=None, description="Exchange of the company." + ) + exchange_short_name: Optional[str] = Field( + default=None, description="Exchange short name of the company." + ) + industry: Optional[str] = Field( + default=None, description="Industry of the company." + ) + website: Optional[str] = Field(default=None, description="Website of the company.") + description: Optional[str] = Field( + default=None, description="Description of the company." 
+ ) + ceo: Optional[str] = Field(default=None, description="CEO of the company.") + sector: Optional[str] = Field(default=None, description="Sector of the company.") + country: Optional[str] = Field(default=None, description="Country of the company.") + full_time_employees: Optional[str] = Field( + default=None, description="Full time employees of the company." + ) + phone: Optional[str] = Field(default=None, description="Phone of the company.") + address: Optional[str] = Field(default=None, description="Address of the company.") + city: Optional[str] = Field(default=None, description="City of the company.") + state: Optional[str] = Field(default=None, description="State of the company.") + zip: Optional[str] = Field(default=None, description="Zip of the company.") + dcf_diff: Optional[float] = Field( + default=None, description="Discounted cash flow difference of the company." + ) + dcf: Optional[float] = Field( + default=None, description="Discounted cash flow of the company." + ) + image: Optional[str] = Field(default=None, description="Image of the company.") + ipo_date: Optional[date] = Field( + default=None, description="IPO date of the company." 
+ ) + default_image: bool = Field(description="If the image is the default image.") + is_etf: bool = Field(description="If the company is an ETF.") + is_actively_trading: bool = Field(description="If the company is actively trading.") + is_adr: bool = Field(description="If the company is an ADR.") + is_fund: bool = Field(description="If the company is a fund.") + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: Union[str, List[str], Set[str]]): + """Convert field to uppercase.""" + if isinstance(v, str): + return v.upper() + return ",".join([symbol.upper() for symbol in list(v)]) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/compare_company_facts.py b/openbb_platform/core/openbb_core/provider/standard_models/compare_company_facts.py new file mode 100644 index 0000000000000000000000000000000000000000..41e75d325ca5448c9638ea2a1602a0ab6484c8a7 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/compare_company_facts.py @@ -0,0 +1,55 @@ +"""Compare Company Facts Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class CompareCompanyFactsQueryParams(QueryParams): + """Compare Company Facts Query.""" + + symbol: Optional[str] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("symbol", "") + ) + fact: str = Field( + default="", + description="The fact to lookup, typically a GAAP-reporting measure. 
Choices vary by provider.", + ) + + +class CompareCompanyFactsData(Data): + """Compare Company Facts Data.""" + + symbol: Optional[str] = Field( + default=None, description=DATA_DESCRIPTIONS.get("symbol", "") + ) + name: Optional[str] = Field(default=None, description="Name of the entity.") + value: float = Field( + description="The reported value of the fact or concept.", + ) + reported_date: Optional[dateType] = Field( + default=None, description="The date when the report was filed." + ) + period_beginning: Optional[dateType] = Field( + default=None, + description="The start date of the reporting period.", + ) + period_ending: Optional[dateType] = Field( + default=None, + description="The end date of the reporting period.", + ) + fiscal_year: Optional[int] = Field( + default=None, + description="The fiscal year.", + ) + fiscal_period: Optional[str] = Field( + default=None, + description="The fiscal period of the fiscal year.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/compare_groups.py b/openbb_platform/core/openbb_core/provider/standard_models/compare_groups.py new file mode 100644 index 0000000000000000000000000000000000000000..e9a6782792fb15665d4a16497619799c4e70d49f --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/compare_groups.py @@ -0,0 +1,12 @@ +"""Compare Groups Model.""" + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams + + +class CompareGroupsQueryParams(QueryParams): + """Compare Groups Query.""" + + +class CompareGroupsData(Data): + """Compare Groups Data.""" diff --git a/openbb_platform/core/openbb_core/provider/standard_models/composite_leading_indicator.py b/openbb_platform/core/openbb_core/provider/standard_models/composite_leading_indicator.py new file mode 100644 index 0000000000000000000000000000000000000000..3a15b73008006e72845714e2f5f0115bae052be1 --- /dev/null +++ 
b/openbb_platform/core/openbb_core/provider/standard_models/composite_leading_indicator.py @@ -0,0 +1,35 @@ +"""Composite Leading Indicator Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class CompositeLeadingIndicatorQueryParams(QueryParams): + """Composite Leading Indicator Query.""" + + start_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("start_date", "") + ) + end_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date", "") + ) + + +class CompositeLeadingIndicatorData(Data): + """Composite Leading Indicator Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + value: Optional[float] = Field( + default=None, + description="CLI value", + json_schema_extra={"x-unit_measurement": "index"}, + ) + country: str = Field(description="Country for the CLI value.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/consumer_price_index.py b/openbb_platform/core/openbb_core/provider/standard_models/consumer_price_index.py new file mode 100644 index 0000000000000000000000000000000000000000..4d602c2e7104d0f0a2248d3511bd604d6374686d --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/consumer_price_index.py @@ -0,0 +1,55 @@ +"""CPI Standard Model.""" + +from datetime import date as dateType +from typing import Literal, Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class ConsumerPriceIndexQueryParams(QueryParams): + 
"""CPI Query.""" + + country: str = Field( + description=QUERY_DESCRIPTIONS.get("country"), + default="united_states", + ) + transform: Literal["index", "yoy", "period"] = Field( + description="Transformation of the CPI data. Period represents the change since previous." + + " Defaults to change from one year ago (yoy).", + default="yoy", + json_schema_extra={"choices": ["index", "yoy", "period"]}, + ) + frequency: Literal["annual", "quarter", "monthly"] = Field( + default="monthly", + description=QUERY_DESCRIPTIONS.get("frequency"), + json_schema_extra={"choices": ["annual", "quarter", "monthly"]}, + ) + harmonized: bool = Field( + default=False, description="If true, returns harmonized data." + ) + start_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("start_date") + ) + end_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date") + ) + + @field_validator("country", mode="before", check_fields=False) + @classmethod + def to_lower(cls, v): + """Convert country to lower case.""" + return v.replace(" ", "_").lower() + + +class ConsumerPriceIndexData(Data): + """CPI data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date")) + country: str = Field(description=DATA_DESCRIPTIONS.get("country")) + value: float = Field(description="CPI index value or period change.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/cot.py b/openbb_platform/core/openbb_core/provider/standard_models/cot.py new file mode 100644 index 0000000000000000000000000000000000000000..459ab0166d88451a73025f9ff05be645f6aa7be3 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/cot.py @@ -0,0 +1,80 @@ +"""Commitment of Traders Reports Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from 
openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class COTQueryParams(QueryParams): + """Commitment of Traders Reports Query.""" + + id: str = Field( + description="A string with the CFTC market code or other identifying string," + + " such as the contract market name, commodity name, or commodity group - i.e, 'gold' or 'japanese yen'." + + "Default report is Fed Funds Futures. Use the 'cftc_market_code' for an exact match.", + default="045601", + ) + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", "") + + " Default is the most recent report.", + ) + end_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date", "") + ) + + +class COTData(Data): + """Commitment of Traders Reports Data. + Data returned will vary based on the query, this model will not define all possible fields. + """ + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + report_week: Optional[str] = Field( + default=None, description="Report week for the year." + ) + market_and_exchange_names: Optional[str] = Field( + default=None, description="Market and exchange names." + ) + cftc_contract_market_code: Optional[str] = Field( + default=None, description="CFTC contract market code." + ) + cftc_market_code: Optional[str] = Field( + default=None, description="CFTC market code." + ) + cftc_region_code: Optional[str] = Field( + default=None, description="CFTC region code." + ) + cftc_commodity_code: Optional[str] = Field( + default=None, description="CFTC commodity code." + ) + cftc_contract_market_code_quotes: Optional[str] = Field( + default=None, description="CFTC contract market code quotes." + ) + cftc_market_code_quotes: Optional[str] = Field( + default=None, description="CFTC market code quotes." 
+ ) + cftc_commodity_code_quotes: Optional[str] = Field( + default=None, description="CFTC commodity code quotes." + ) + cftc_subgroup_code: Optional[str] = Field( + default=None, description="CFTC subgroup code." + ) + commodity: Optional[str] = Field(default=None, description="Commodity.") + commodity_group: Optional[str] = Field( + default=None, description="Commodity group name." + ) + commodity_subgroup: Optional[str] = Field( + default=None, description="Commodity subgroup name." + ) + futonly_or_combined: Optional[str] = Field( + default=None, description="If the report is futures-only or combined." + ) + contract_units: Optional[str] = Field(default=None, description="Contract units.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/cot_search.py b/openbb_platform/core/openbb_core/provider/standard_models/cot_search.py new file mode 100644 index 0000000000000000000000000000000000000000..1942b99d6a645aacb7901aae102597c380b2c6a3 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/cot_search.py @@ -0,0 +1,33 @@ +"""Commitment of Traders Reports Search Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS +from pydantic import Field + + +class CotSearchQueryParams(QueryParams): + """Commitment of Traders Reports Search Query.""" + + query: str = Field(description="Search query.", default="") + + +class CotSearchData(Data): + """Commitment of Traders Reports Search Data.""" + + code: str = Field(description="CFTC market contract code of the report.") + name: str = Field(description="Name of the underlying asset.") + category: Optional[str] = Field( + default=None, description="Category of the underlying asset." + ) + subcategory: Optional[str] = Field( + default=None, description="Subcategory of the underlying asset." 
+ ) + units: Optional[str] = Field( + default=None, description="The units for one contract." + ) + symbol: Optional[str] = Field( + default=None, description=DATA_DESCRIPTIONS.get("symbol", "") + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/country_interest_rates.py b/openbb_platform/core/openbb_core/provider/standard_models/country_interest_rates.py new file mode 100644 index 0000000000000000000000000000000000000000..4f4c5124c2c439050495bb6788d94882297158da --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/country_interest_rates.py @@ -0,0 +1,42 @@ +"""Country Interest Rates Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class CountryInterestRatesQueryParams(QueryParams): + """Country Interest Rates Query.""" + + country: str = Field( + default="united_states", + description=QUERY_DESCRIPTIONS.get("country", ""), + ) + start_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("start_date", "") + ) + end_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date", "") + ) + + +class CountryInterestRatesData(Data):
 + """Country Interest Rates Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + value: Optional[float] = Field( + default=None, + description="The interest rate value.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + country: Optional[str] = Field( + default=None, + description="Country for which the interest rate is given.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/country_profile.py
b/openbb_platform/core/openbb_core/provider/standard_models/country_profile.py new file mode 100644 index 0000000000000000000000000000000000000000..53ff1d2292f283982bb0365b02b48c6b81395326 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/country_profile.py @@ -0,0 +1,88 @@ +"""Country Profile Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class CountryProfileQueryParams(QueryParams): + """Country Profile Query.""" + + country: str = Field(description=QUERY_DESCRIPTIONS.get("country", "")) + + @field_validator("country", mode="before", check_fields=False) + @classmethod + def to_lower(cls, v: str) -> str: + """Convert the country to lowercase.""" + return v.lower().replace(" ", "_") + + +class CountryProfileData(Data): + """Country Profile Data.""" + + country: str = Field(description=DATA_DESCRIPTIONS.get("country", "")) + population: Optional[int] = Field(default=None, description="Population.") + gdp_usd: Optional[float] = Field( + default=None, description="Gross Domestic Product, in billions of USD." 
+ ) + gdp_qoq: Optional[float] = Field( + default=None, + description="GDP growth quarter-over-quarter change, as a normalized percent.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + gdp_yoy: Optional[float] = Field( + default=None, + description="GDP growth year-over-year change, as a normalized percent.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + cpi_yoy: Optional[float] = Field( + default=None, + description="Consumer Price Index year-over-year change, as a normalized percent.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + core_yoy: Optional[float] = Field( + default=None, + description="Core Consumer Price Index year-over-year change, as a normalized percent.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + retail_sales_yoy: Optional[float] = Field( + default=None, + description="Retail Sales year-over-year change, as a normalized percent.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + industrial_production_yoy: Optional[float] = Field( + default=None, + description="Industrial Production year-over-year change, as a normalized percent.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + policy_rate: Optional[float] = Field( + default=None, + description="Short term policy rate, as a normalized percent.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + yield_10y: Optional[float] = Field( + default=None, + description="10-year government bond yield, as a normalized percent.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + govt_debt_gdp: Optional[float] = Field( + default=None, + description="Government debt as a percent (normalized) of GDP.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + 
) + current_account_gdp: Optional[float] = Field( + default=None, + description="Current account balance as a percent (normalized) of GDP.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + jobless_rate: Optional[float] = Field( + default=None, + description="Unemployment rate, as a normalized percent.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/crypto_historical.py b/openbb_platform/core/openbb_core/provider/standard_models/crypto_historical.py new file mode 100644 index 0000000000000000000000000000000000000000..ae2e991825d51bc083aba43999c12d9b480f0c47 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/crypto_historical.py @@ -0,0 +1,67 @@ +"""Crypto Historical Price Standard Model.""" + +from datetime import ( + date as dateType, + datetime, +) +from typing import List, Optional, Set, Union + +from dateutil import parser +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class CryptoHistoricalQueryParams(QueryParams): + """Crypto Historical Price Query.""" + + symbol: str = Field( + description=QUERY_DESCRIPTIONS.get("symbol", "") + + " Can use CURR1-CURR2 or CURR1CURR2 format." 
+ ) + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def validate_symbol(cls, v: Union[str, List[str], Set[str]]): + """Convert field to uppercase and remove '-'.""" + if isinstance(v, str): + return v.upper().replace("-", "") + return ",".join([symbol.upper().replace("-", "") for symbol in list(v)]) + + +class CryptoHistoricalData(Data): + """Crypto Historical Price Data.""" + + date: Union[dateType, datetime] = Field( + description=DATA_DESCRIPTIONS.get("date", "") + ) + open: float = Field(description=DATA_DESCRIPTIONS.get("open", "")) + high: float = Field(description=DATA_DESCRIPTIONS.get("high", "")) + low: float = Field(description=DATA_DESCRIPTIONS.get("low", "")) + close: float = Field(description=DATA_DESCRIPTIONS.get("close", "")) + volume: Optional[float] = Field( + default=None, description=DATA_DESCRIPTIONS.get("volume", "") + ) + vwap: Optional[float] = Field( + default=None, description=DATA_DESCRIPTIONS.get("vwap", "") + ) + + @field_validator("date", mode="before", check_fields=False) + @classmethod + def date_validate(cls, v): # pylint: disable=E0213 + """Return formatted datetime.""" + if ":" in str(v): + return parser.isoparse(str(v)) + return parser.parse(str(v)).date() diff --git a/openbb_platform/core/openbb_core/provider/standard_models/crypto_search.py b/openbb_platform/core/openbb_core/provider/standard_models/crypto_search.py new file mode 100644 index 0000000000000000000000000000000000000000..a82a4299fde900f0b95790c3e62508d97b9bbd88 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/crypto_search.py @@ -0,0 +1,21 @@ +"""Crypto Search Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from 
openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS +from pydantic import Field + + +class CryptoSearchQueryParams(QueryParams): + """Crypto Search Query.""" + + query: Optional[str] = Field(description="Search query.", default=None) + + +class CryptoSearchData(Data): + """Crypto Search Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "") + " (Crypto)") + name: Optional[str] = Field(description="Name of the crypto.", default=None) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/currency_historical.py b/openbb_platform/core/openbb_core/provider/standard_models/currency_historical.py new file mode 100644 index 0000000000000000000000000000000000000000..7df26fce6a9c84169a78a7d7ca334be801478d70 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/currency_historical.py @@ -0,0 +1,59 @@ +"""Currency Historical Price Standard Model.""" + +from datetime import ( + date as dateType, + datetime, +) +from typing import List, Optional, Set, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, PositiveFloat, field_validator + + +class CurrencyHistoricalQueryParams(QueryParams): + """Currency Historical Price Query.""" + + symbol: str = Field( + description=QUERY_DESCRIPTIONS.get("symbol", "") + + " Can use CURR1-CURR2 or CURR1CURR2 format." 
+ ) + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + + @field_validator("symbol", mode="before", check_fields=False) + def validate_symbol( + cls, v: Union[str, List[str], Set[str]] + ): # pylint: disable=E0213 + """Convert field to uppercase and remove '-'.""" + if isinstance(v, str): + return v.upper().replace("-", "") + return ",".join([symbol.upper().replace("-", "") for symbol in list(v)]) + + +class CurrencyHistoricalData(Data): + """Currency Historical Price Data.""" + + date: Union[dateType, datetime] = Field( + description=DATA_DESCRIPTIONS.get("date", "") + ) + open: PositiveFloat = Field(description=DATA_DESCRIPTIONS.get("open", "")) + high: PositiveFloat = Field(description=DATA_DESCRIPTIONS.get("high", "")) + low: PositiveFloat = Field(description=DATA_DESCRIPTIONS.get("low", "")) + close: PositiveFloat = Field(description=DATA_DESCRIPTIONS.get("close", "")) + volume: Optional[float] = Field( + description=DATA_DESCRIPTIONS.get("volume", ""), default=None + ) + vwap: Optional[PositiveFloat] = Field( + description=DATA_DESCRIPTIONS.get("vwap", ""), default=None + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/currency_pairs.py b/openbb_platform/core/openbb_core/provider/standard_models/currency_pairs.py new file mode 100644 index 0000000000000000000000000000000000000000..669e87020aec3ce2b2ceaad5145f178613caf7d6 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/currency_pairs.py @@ -0,0 +1,23 @@ +"""Currency Available Pairs Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS +from pydantic import Field + + +class 
CurrencyPairsQueryParams(QueryParams): + """Currency Available Pairs Query.""" + + query: Optional[str] = Field( + default=None, description="Query to search for currency pairs." + ) + + +class CurrencyPairsData(Data): + """Currency Available Pairs Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + name: Optional[str] = Field(default=None, description="Name of the currency pair.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/currency_reference_rates.py b/openbb_platform/core/openbb_core/provider/standard_models/currency_reference_rates.py new file mode 100644 index 0000000000000000000000000000000000000000..db7438b4fe51d15ed02ba41da8a0830ade083c65 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/currency_reference_rates.py @@ -0,0 +1,52 @@ +"""Currency Reference Rates Model.""" + +from datetime import ( + date as dateType, +) +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS +from pydantic import Field + + +class CurrencyReferenceRatesQueryParams(QueryParams): + """Currency Reference Rates Query.""" + + +class CurrencyReferenceRatesData(Data): + """Currency Reference Rates Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + EUR: Optional[float] = Field(description="Euro.", default=None) + USD: Optional[float] = Field(description="US Dollar.", default=None) + JPY: Optional[float] = Field(description="Japanese Yen.", default=None) + BGN: Optional[float] = Field(description="Bulgarian Lev.", default=None) + CZK: Optional[float] = Field(description="Czech Koruna.", default=None) + DKK: Optional[float] = Field(description="Danish Krone.", default=None) + GBP: Optional[float] = Field(description="Pound Sterling.", default=None) + HUF: Optional[float] = Field(description="Hungarian 
Forint.", default=None) + PLN: Optional[float] = Field(description="Polish Zloty.", default=None) + RON: Optional[float] = Field(description="Romanian Leu.", default=None) + SEK: Optional[float] = Field(description="Swedish Krona.", default=None) + CHF: Optional[float] = Field(description="Swiss Franc.", default=None) + ISK: Optional[float] = Field(description="Icelandic Krona.", default=None) + NOK: Optional[float] = Field(description="Norwegian Krone.", default=None) + TRY: Optional[float] = Field(description="Turkish Lira.", default=None) + AUD: Optional[float] = Field(description="Australian Dollar.", default=None) + BRL: Optional[float] = Field(description="Brazilian Real.", default=None) + CAD: Optional[float] = Field(description="Canadian Dollar.", default=None) + CNY: Optional[float] = Field(description="Chinese Yuan.", default=None) + HKD: Optional[float] = Field(description="Hong Kong Dollar.", default=None) + IDR: Optional[float] = Field(description="Indonesian Rupiah.", default=None) + ILS: Optional[float] = Field(description="Israeli Shekel.", default=None) + INR: Optional[float] = Field(description="Indian Rupee.", default=None) + KRW: Optional[float] = Field(description="South Korean Won.", default=None) + MXN: Optional[float] = Field(description="Mexican Peso.", default=None) + MYR: Optional[float] = Field(description="Malaysian Ringgit.", default=None) + NZD: Optional[float] = Field(description="New Zealand Dollar.", default=None) + PHP: Optional[float] = Field(description="Philippine Peso.", default=None) + SGD: Optional[float] = Field(description="Singapore Dollar.", default=None) + THB: Optional[float] = Field(description="Thai Baht.", default=None) + ZAR: Optional[float] = Field(description="South African Rand.", default=None) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/currency_snapshots.py b/openbb_platform/core/openbb_core/provider/standard_models/currency_snapshots.py new file mode 100644 index 
0000000000000000000000000000000000000000..1447e85b9d73b799329b91499876dafc668cd9d9 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/currency_snapshots.py @@ -0,0 +1,81 @@ +"""Currency Snapshots Standard Model.""" + +from typing import List, Literal, Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS +from pydantic import Field, field_validator + + +class CurrencySnapshotsQueryParams(QueryParams): + """Currency Snapshots Query Params.""" + + base: str = Field(description="The base currency symbol.", default="usd") + quote_type: Literal["direct", "indirect"] = Field( + description="Whether the quote is direct or indirect." + + " Selecting 'direct' will return the exchange rate" + + " as the amount of domestic currency required to buy one unit" + + " of the foreign currency." + + " Selecting 'indirect' (default) will return the exchange rate" + + " as the amount of foreign currency required to buy one unit" + + " of the domestic currency.", + default="indirect", + ) + counter_currencies: Optional[Union[str, List[str]]] = Field( + description="An optional list of counter currency symbols to filter for." 
+ + " None returns all.", + default=None, + ) + + @field_validator("base", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v): + """Convert the base currency to uppercase.""" + return v.upper() + + @field_validator("counter_currencies", mode="before", check_fields=False) + @classmethod + def convert_string(cls, v): + """Convert the counter currencies to an upper case string list.""" + if v is not None: + return ",".join(v).upper() if isinstance(v, list) else v.upper() + return None + + +class CurrencySnapshotsData(Data): + """Currency Snapshots Data.""" + + base_currency: str = Field(description="The base, or domestic, currency.") + counter_currency: str = Field(description="The counter, or foreign, currency.") + last_rate: float = Field( + description="The exchange rate, relative to the base currency." + + " Rates are expressed as the amount of foreign currency" + + " received from selling one unit of the base currency," + + " or the quantity of foreign currency required to purchase" + + " one unit of the domestic currency." 
+ + " To invert the perspective, set the 'quote_type' parameter as 'direct'.", + ) + open: Optional[float] = Field( + description=DATA_DESCRIPTIONS.get("open", ""), + default=None, + ) + high: Optional[float] = Field( + description=DATA_DESCRIPTIONS.get("high", ""), + default=None, + ) + low: Optional[float] = Field( + description=DATA_DESCRIPTIONS.get("low", ""), + default=None, + ) + close: Optional[float] = Field( + description=DATA_DESCRIPTIONS.get("close", ""), + default=None, + ) + volume: Optional[int] = Field( + description=DATA_DESCRIPTIONS.get("volume", ""), default=None + ) + prev_close: Optional[float] = Field( + description=DATA_DESCRIPTIONS.get("prev_close", ""), + default=None, + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/direction_of_trade.py b/openbb_platform/core/openbb_core/provider/standard_models/direction_of_trade.py new file mode 100644 index 0000000000000000000000000000000000000000..ca0c3680bcbdc7b21de558217f07dd8795032645 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/direction_of_trade.py @@ -0,0 +1,65 @@ +"""Direction Of Trade Standard Model.""" + +from datetime import date as dateType +from typing import Literal, Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class DirectionOfTradeQueryParams(QueryParams): + """Direction Of Trade Query.""" + + __json_schema_extra__ = { + "direction": { + "choices": ["exports", "imports", "balance", "all"], + }, + "frequency": { + "choices": ["month", "quarter", "annual"], + }, + } + + country: Optional[str] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("country", "") + + " None is an equivalent to 'all'. 
If 'all' is used, the counterpart field cannot be 'all'.", + ) + counterpart: Optional[str] = Field( + default=None, + description="Counterpart country to the trade. None is an equivalent to 'all'." + + " If 'all' is used, the country field cannot be 'all'.", + ) + direction: Literal["exports", "imports", "balance", "all"] = Field( + default="balance", + description="Trade direction. Use 'all' to get all data for this dimension.", + ) + start_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("start_date", "") + ) + end_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date", "") + ) + frequency: Literal["month", "quarter", "annual"] = Field( + default="month", description=QUERY_DESCRIPTIONS.get("frequency", "") + ) + + +class DirectionOfTradeData(Data): + """Direction Of Trade Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + symbol: Optional[str] = Field( + default=None, description=DATA_DESCRIPTIONS.get("symbol", "") + ) + country: str = Field(description=DATA_DESCRIPTIONS.get("country", "")) + counterpart: str = Field(description="Counterpart country or region to the trade.") + title: Optional[str] = Field( + default=None, description="Title corresponding to the symbol." 
+ ) + value: float = Field(description="Trade value.") + scale: Optional[str] = Field(default=None, description="Scale of the value.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/discovery_filings.py b/openbb_platform/core/openbb_core/provider/standard_models/discovery_filings.py new file mode 100644 index 0000000000000000000000000000000000000000..f640a97ead781c9a5854f6ce41ab9624c1715514 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/discovery_filings.py @@ -0,0 +1,49 @@ +"""Discovery Filings Standard Model.""" + +from datetime import ( + date as dateType, + datetime, +) +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, NonNegativeInt + + +class DiscoveryFilingsQueryParams(QueryParams): + """Discovery Filings Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS["start_date"], + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS["end_date"], + ) + form_type: Optional[str] = Field( + default=None, + description=( + "Filter by form type. Visit https://www.sec.gov/forms " + "for a list of supported form types." 
+ ), + ) + limit: NonNegativeInt = Field( + default=100, description=QUERY_DESCRIPTIONS.get("limit", "") + ) + + +class DiscoveryFilingsData(Data): + """Discovery Filings Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + cik: str = Field(description=DATA_DESCRIPTIONS.get("cik", "")) + title: str = Field(description="Title of the filing.") + date: datetime = Field(description=DATA_DESCRIPTIONS.get("date", "")) + form_type: str = Field(description="The form type of the filing") + link: str = Field(description="URL to the filing page on the SEC site.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/dwpcr_rates.py b/openbb_platform/core/openbb_core/provider/standard_models/dwpcr_rates.py new file mode 100644 index 0000000000000000000000000000000000000000..5141a412f5d817f3aebd33ec9825a65ee364b760 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/dwpcr_rates.py @@ -0,0 +1,34 @@ +"""Discount Window Primary Credit Rate Standard Model.""" + +from datetime import ( + date as dateType, +) +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class DiscountWindowPrimaryCreditRateParams(QueryParams): + """Discount Window Primary Credit Rate Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + + +class DiscountWindowPrimaryCreditRateData(Data): + """Discount Window Primary Credit Rate Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + rate: Optional[float] = Field(description="Discount Window Primary Credit Rate.") diff --git 
a/openbb_platform/core/openbb_core/provider/standard_models/earnings_call_transcript.py b/openbb_platform/core/openbb_core/provider/standard_models/earnings_call_transcript.py new file mode 100644 index 0000000000000000000000000000000000000000..c947800d04c327f19ef5f22d2a7e563fe45c2b21 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/earnings_call_transcript.py @@ -0,0 +1,43 @@ +"""Earnings Call Transcript Standard Model.""" + +from datetime import datetime +from typing import List, Set, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class EarningsCallTranscriptQueryParams(QueryParams): + """Earnings Call Transcript Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + year: Union[int, str] = Field(description="Year of the earnings call transcript.") + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class EarningsCallTranscriptData(Data): + """Earnings Call Transcript Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + quarter: int = Field(description="Quarter of the earnings call transcript.") + year: int = Field(description="Year of the earnings call transcript.") + date: datetime = Field(description=DATA_DESCRIPTIONS.get("date", "")) + content: str = Field(description="Content of the earnings call transcript.") + + @field_validator("symbol", mode="before") + @classmethod + def to_upper(cls, v: Union[str, List[str], Set[str]]): + """Convert field to uppercase.""" + if isinstance(v, str): + return v.upper() + return ",".join([symbol.upper() for symbol in list(v)]) diff --git 
a/openbb_platform/core/openbb_core/provider/standard_models/ecb_interest_rates.py b/openbb_platform/core/openbb_core/provider/standard_models/ecb_interest_rates.py new file mode 100644 index 0000000000000000000000000000000000000000..ce8f55a73233cbda70d95a989e36692007b5d9dc --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/ecb_interest_rates.py @@ -0,0 +1,44 @@ +"""European Central Bank Interest Rates Standard Model.""" + +from datetime import ( + date as dateType, +) +from typing import Literal, Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class EuropeanCentralBankInterestRatesParams(QueryParams): + """European Central Bank Interest Rates Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + interest_rate_type: Literal["deposit", "lending", "refinancing"] = Field( + default="lending", + description="The type of interest rate.", + ) + + @field_validator("interest_rate_type", mode="before", check_fields=False) + @classmethod + def to_lower(cls, v: Optional[str]) -> Optional[str]: + """Convert field to lowercase.""" + return v.lower() if v else v + + +class EuropeanCentralBankInterestRatesData(Data): + """European Central Bank Interest Rates Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + rate: Optional[float] = Field(description="European Central Bank Interest Rate.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/economic_calendar.py b/openbb_platform/core/openbb_core/provider/standard_models/economic_calendar.py new file mode 100644 index 
0000000000000000000000000000000000000000..4665c4e435160b5a46d9740484bbcfae621efd18 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/economic_calendar.py @@ -0,0 +1,60 @@ +"""Economic Calendar Standard Model.""" + +from datetime import ( + date as dateType, + datetime, +) +from typing import Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class EconomicCalendarQueryParams(QueryParams): + """Economic Calendar Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + + +class EconomicCalendarData(Data): + """Economic Calendar Data.""" + + date: Optional[datetime] = Field( + default=None, description=DATA_DESCRIPTIONS.get("date", "") + ) + country: Optional[str] = Field(default=None, description="Country of event.") + category: Optional[str] = Field(default=None, description="Category of event.") + event: Optional[str] = Field(default=None, description="Event name.") + importance: Optional[str] = Field( + default=None, description="The importance level for the event." 
+ ) + source: Optional[str] = Field(default=None, description="Source of the data.") + currency: Optional[str] = Field(default=None, description="Currency of the data.") + unit: Optional[str] = Field(default=None, description="Unit of the data.") + consensus: Optional[Union[str, float]] = Field( + default=None, + description="Average forecast among a representative group of economists.", + ) + previous: Optional[Union[str, float]] = Field( + default=None, + description="Value for the previous period after the revision (if revision is applicable).", + ) + revised: Optional[Union[str, float]] = Field( + default=None, + description="Revised previous value, if applicable.", + ) + actual: Optional[Union[str, float]] = Field( + default=None, description="Latest released value." + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/economic_indicators.py b/openbb_platform/core/openbb_core/provider/standard_models/economic_indicators.py new file mode 100644 index 0000000000000000000000000000000000000000..78c6f96357b57df6242fb41726b167ad4a8c5393 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/economic_indicators.py @@ -0,0 +1,46 @@ +"""Economic Indicators Standard Model.""" + +from datetime import date as dateType +from typing import Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class EconomicIndicatorsQueryParams(QueryParams): + """Economic Indicators Query.""" + + country: Optional[str] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("country", "") + + " The country represented by the indicator, if available.", + ) + start_date: Optional[dateType] = Field( + description=QUERY_DESCRIPTIONS.get("start_date", ""), default=None + ) + end_date: Optional[dateType] = Field( + 
description=QUERY_DESCRIPTIONS.get("end_date", ""), default=None + ) + + +class EconomicIndicatorsData(Data): + """Economic Indicators Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + symbol_root: Optional[str] = Field( + default=None, description="The root symbol for the indicator (e.g. GDP)." + ) + symbol: Optional[str] = Field( + default=None, description=DATA_DESCRIPTIONS.get("symbol", "") + ) + country: Optional[str] = Field( + default=None, description="The country represented by the data." + ) + value: Optional[Union[int, float]] = Field( + default=None, description=DATA_DESCRIPTIONS.get("value", "") + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/equity_ftd.py b/openbb_platform/core/openbb_core/provider/standard_models/equity_ftd.py new file mode 100644 index 0000000000000000000000000000000000000000..427ed402a0eb1d53e9a143342054a83618a71ed5 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/equity_ftd.py @@ -0,0 +1,60 @@ +"""Equity FTD Standard Model.""" + +from datetime import ( + date as dateType, + datetime, +) +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class EquityFtdQueryParams(QueryParams): + """Equity FTD Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str): + """Convert field to uppercase.""" + return v.upper() + + +class EquityFtdData(Data): + """Equity FTD Data.""" + + settlement_date: Optional[dateType] = Field( + description="The settlement date of the fail.", default=None + ) + symbol: Optional[str] = Field( + description=DATA_DESCRIPTIONS.get("symbol", ""), + 
default=None, + ) + cusip: Optional[str] = Field( + description="CUSIP of the Security.", + default=None, + ) + quantity: Optional[int] = Field( + description="The number of fails on that settlement date.", + default=None, + ) + price: Optional[float] = Field( + description="The price at the previous closing price from the settlement date.", + default=None, + ) + description: Optional[str] = Field( + description="The description of the Security.", + default=None, + ) + + @field_validator("settlement_date", mode="before") + def date_validate(cls, v): # pylint: disable=E0213 + """Return the date as a datetime object.""" + return datetime.strftime(v, "%Y-%m-%d") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/equity_historical.py b/openbb_platform/core/openbb_core/provider/standard_models/equity_historical.py new file mode 100644 index 0000000000000000000000000000000000000000..ab3f230319f89fdb2a780952992f2fa1f0be5af6 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/equity_historical.py @@ -0,0 +1,64 @@ +"""Equity Historical Price Standard Model.""" + +from datetime import ( + date as dateType, + datetime, +) +from typing import Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class EquityHistoricalQueryParams(QueryParams): + """Equity Historical Price Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + 
"""Convert field to uppercase.""" + return v.upper() + + +class EquityHistoricalData(Data): + """Equity Historical Price Data.""" + + date: Union[dateType, datetime] = Field( + description=DATA_DESCRIPTIONS.get("date", "") + ) + open: float = Field(description=DATA_DESCRIPTIONS.get("open", "")) + high: float = Field(description=DATA_DESCRIPTIONS.get("high", "")) + low: float = Field(description=DATA_DESCRIPTIONS.get("low", "")) + close: float = Field(description=DATA_DESCRIPTIONS.get("close", "")) + volume: Optional[Union[float, int]] = Field( + default=None, description=DATA_DESCRIPTIONS.get("volume", "") + ) + vwap: Optional[float] = Field( + default=None, description=DATA_DESCRIPTIONS.get("vwap", "") + ) + + @field_validator("date", mode="before", check_fields=False) + @classmethod + def date_validate(cls, v): + """Return formatted datetime.""" + # pylint: disable=import-outside-toplevel + from dateutil import parser + + if ":" in str(v): + return parser.isoparse(str(v)) + return parser.parse(str(v)).date() diff --git a/openbb_platform/core/openbb_core/provider/standard_models/equity_info.py b/openbb_platform/core/openbb_core/provider/standard_models/equity_info.py new file mode 100644 index 0000000000000000000000000000000000000000..274d62401ada86a6936f2aef3d7edb86b5f2cf7d --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/equity_info.py @@ -0,0 +1,142 @@ +"""Equity Info Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class EquityInfoQueryParams(QueryParams): + """Equity Info Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol", mode="before", check_fields=False) + 
@classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class EquityInfoData(Data): + """Equity Info Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + name: Optional[str] = Field(default=None, description="Common name of the company.") + cik: Optional[str] = Field( + default=None, + description=DATA_DESCRIPTIONS.get("cik", ""), + ) + cusip: Optional[str] = Field( + default=None, description="CUSIP identifier for the company." + ) + isin: Optional[str] = Field( + default=None, description="International Securities Identification Number." + ) + lei: Optional[str] = Field( + default=None, description="Legal Entity Identifier assigned to the company." + ) + legal_name: Optional[str] = Field( + default=None, description="Official legal name of the company." + ) + stock_exchange: Optional[str] = Field( + default=None, description="Stock exchange where the company is traded." + ) + sic: Optional[int] = Field( + default=None, + description="Standard Industrial Classification code for the company.", + ) + short_description: Optional[str] = Field( + default=None, description="Short description of the company." + ) + long_description: Optional[str] = Field( + default=None, description="Long description of the company." + ) + ceo: Optional[str] = Field( + default=None, description="Chief Executive Officer of the company." + ) + company_url: Optional[str] = Field( + default=None, description="URL of the company's website." + ) + business_address: Optional[str] = Field( + default=None, description="Address of the company's headquarters." + ) + mailing_address: Optional[str] = Field( + default=None, description="Mailing address of the company." + ) + business_phone_no: Optional[str] = Field( + default=None, description="Phone number of the company's headquarters." + ) + hq_address1: Optional[str] = Field( + default=None, description="Address of the company's headquarters." 
+ ) + hq_address2: Optional[str] = Field( + default=None, description="Address of the company's headquarters." + ) + hq_address_city: Optional[str] = Field( + default=None, description="City of the company's headquarters." + ) + hq_address_postal_code: Optional[str] = Field( + default=None, description="Zip code of the company's headquarters." + ) + hq_state: Optional[str] = Field( + default=None, description="State of the company's headquarters." + ) + hq_country: Optional[str] = Field( + default=None, description="Country of the company's headquarters." + ) + inc_state: Optional[str] = Field( + default=None, description="State in which the company is incorporated." + ) + inc_country: Optional[str] = Field( + default=None, description="Country in which the company is incorporated." + ) + employees: Optional[int] = Field( + default=None, description="Number of employees working for the company." + ) + entity_legal_form: Optional[str] = Field( + default=None, description="Legal form of the company." + ) + entity_status: Optional[str] = Field( + default=None, description="Status of the company." + ) + latest_filing_date: Optional[dateType] = Field( + default=None, description="Date of the company's latest filing." + ) + irs_number: Optional[str] = Field( + default=None, description="IRS number assigned to the company." + ) + sector: Optional[str] = Field( + default=None, description="Sector in which the company operates." + ) + industry_category: Optional[str] = Field( + default=None, description="Category of industry in which the company operates." + ) + industry_group: Optional[str] = Field( + default=None, description="Group of industry in which the company operates." + ) + template: Optional[str] = Field( + default=None, + description="Template used to standardize the company's financial statements.", + ) + standardized_active: Optional[bool] = Field( + default=None, description="Whether the company is active or not." 
+ ) + first_fundamental_date: Optional[dateType] = Field( + default=None, description="Date of the company's first fundamental." + ) + last_fundamental_date: Optional[dateType] = Field( + default=None, description="Date of the company's last fundamental." + ) + first_stock_price_date: Optional[dateType] = Field( + default=None, description="Date of the company's first stock price." + ) + last_stock_price_date: Optional[dateType] = Field( + default=None, description="Date of the company's last stock price." + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/equity_nbbo.py b/openbb_platform/core/openbb_core/provider/standard_models/equity_nbbo.py new file mode 100644 index 0000000000000000000000000000000000000000..806221b45fdb54fba6296d1361974cbcef9f56ef --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/equity_nbbo.py @@ -0,0 +1,47 @@ +"""Equity NBBO Standard Model.""" + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import QUERY_DESCRIPTIONS +from pydantic import Field, field_validator + + +class EquityNBBOQueryParams(QueryParams): + """Equity NBBO Query.""" + + symbol: str = Field( + description=QUERY_DESCRIPTIONS.get("symbol", ""), + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str): + """Convert field to uppercase.""" + return v.upper() + + +class EquityNBBOData(Data): + """Equity NBBO Data.""" + + ask_exchange: str = Field( + description="The exchange ID for the ask.", + ) + ask: float = Field( + description="The last ask price.", + ) + ask_size: int = Field( + description=""" + The ask size. This represents the number of round lot orders at the given ask price. + The normal round lot size is 100 shares. + An ask size of 2 means there are 200 shares available to purchase at the given ask price. 
+ """, + ) + bid_size: int = Field( + description="The bid size in round lots.", + ) + bid: float = Field( + description="The last bid price.", + ) + bid_exchange: str = Field( + description="The exchange ID for the bid.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/equity_ownership.py b/openbb_platform/core/openbb_core/provider/standard_models/equity_ownership.py new file mode 100644 index 0000000000000000000000000000000000000000..d40c9102fd1ddb67233d02fd8cd4a862b9b17e56 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/equity_ownership.py @@ -0,0 +1,156 @@ +"""Equity Ownership Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class EquityOwnershipQueryParams(QueryParams): + """Equity Ownership Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("date", "") + ) + page: Optional[int] = Field( + default=0, description="Page number of the data to fetch." 
+ ) + + @field_validator("date", mode="before", check_fields=False) + @classmethod + def time_validate(cls, v: str): + """Validate the date.""" + if v is None: + v = dateType.today() + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class EquityOwnershipData(Data): + """Equity Ownership Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + cik: int = Field(description=DATA_DESCRIPTIONS.get("cik", "")) + filing_date: dateType = Field(description="Filing date of the stock ownership.") + investor_name: str = Field( + ..., + description="Investor name of the stock ownership.", + ) + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + security_name: str = Field( + ..., + description="Security name of the stock ownership.", + ) + type_of_security: str = Field( + ..., + description="Type of security of the stock ownership.", + ) + security_cusip: str = Field( + ..., + description="Security cusip of the stock ownership.", + ) + shares_type: str = Field(description="Shares type of the stock ownership.") + put_call_share: str = Field( + ..., + description="Put call share of the stock ownership.", + ) + investment_discretion: str = Field( + ..., + description="Investment discretion of the stock ownership.", + ) + industry_title: str = Field( + ..., + description="Industry title of the stock ownership.", + ) + weight: float = Field(description="Weight of the stock ownership.") + last_weight: float = Field(description="Last weight of the stock ownership.") + change_in_weight: float = Field( + ..., + description="Change in weight of the stock ownership.", + ) + change_in_weight_percentage: float = Field( + ..., + description="Change in weight percentage of the stock ownership.", + ) + market_value: int = Field(description="Market value of the stock ownership.") + last_market_value: int = Field( + ..., + 
description="Last market value of the stock ownership.", + ) + change_in_market_value: int = Field( + ..., + description="Change in market value of the stock ownership.", + ) + change_in_market_value_percentage: float = Field( + ..., + description="Change in market value percentage of the stock ownership.", + ) + shares_number: int = Field( + ..., + description="Shares number of the stock ownership.", + ) + last_shares_number: int = Field( + ..., + description="Last shares number of the stock ownership.", + ) + change_in_shares_number: float = Field( + ..., + description="Change in shares number of the stock ownership.", + ) + change_in_shares_number_percentage: float = Field( + ..., + description="Change in shares number percentage of the stock ownership.", + ) + quarter_end_price: float = Field( + ..., + description="Quarter end price of the stock ownership.", + ) + avg_price_paid: float = Field( + ..., + description="Average price paid of the stock ownership.", + ) + is_new: bool = Field(description="Is the stock ownership new.") + is_sold_out: bool = Field(description="Is the stock ownership sold out.") + ownership: float = Field(description="How much is the ownership.") + last_ownership: float = Field(description="Last ownership amount.") + change_in_ownership: float = Field(description="Change in ownership amount.") + change_in_ownership_percentage: float = Field( + ..., + description="Change in ownership percentage.", + ) + holding_period: int = Field( + ..., + description="Holding period of the stock ownership.", + ) + first_added: dateType = Field( + ..., + description="First added date of the stock ownership.", + ) + performance: float = Field(description="Performance of the stock ownership.") + performance_percentage: float = Field( + ..., + description="Performance percentage of the stock ownership.", + ) + last_performance: float = Field( + ..., + description="Last performance of the stock ownership.", + ) + change_in_performance: float = Field( + ..., 
+ description="Change in performance of the stock ownership.", + ) + is_counted_for_performance: bool = Field( + ..., + description="Is the stock ownership counted for performance.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/equity_peers.py b/openbb_platform/core/openbb_core/provider/standard_models/equity_peers.py new file mode 100644 index 0000000000000000000000000000000000000000..5fb5207521950732ab9896bbe8846d92505ba46f --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/equity_peers.py @@ -0,0 +1,29 @@ +"""Equity Peers Standard Model.""" + +from typing import List + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import QUERY_DESCRIPTIONS +from pydantic import Field, field_validator + + +class EquityPeersQueryParams(QueryParams): + """Equity Peers Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class EquityPeersData(Data): + """Equity Peers Data.""" + + peers_list: List[str] = Field( + default_factory=list, + description="A list of equity peers based on sector, exchange and market cap.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/equity_performance.py b/openbb_platform/core/openbb_core/provider/standard_models/equity_performance.py new file mode 100644 index 0000000000000000000000000000000000000000..89711a1d94447f09edb200cef170e8f17aff1424 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/equity_performance.py @@ -0,0 +1,50 @@ +"""Equity Performance Standard Model.""" + +from typing import Literal, Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params 
import QueryParams +from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS +from pydantic import Field, field_validator + + +class EquityPerformanceQueryParams(QueryParams): + """Equity Performance Query.""" + + sort: Literal["asc", "desc"] = Field( + default="desc", + description="Sort order. Possible values: 'asc', 'desc'. Default: 'desc'.", + ) + + @field_validator("sort", mode="before", check_fields=False) + @classmethod + def to_lower(cls, v: Optional[str]) -> Optional[str]: + """Convert field to lowercase.""" + return v.lower() if v else v + + +class EquityPerformanceData(Data): + """Equity Performance Data.""" + + symbol: str = Field( + description=DATA_DESCRIPTIONS.get("symbol", ""), + ) + name: Optional[str] = Field( + default=None, + description="Name of the entity.", + ) + price: float = Field( + description="Last price.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + change: float = Field( + description="Change in price.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + percent_change: float = Field( + description="Percent change.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + volume: Union[int, float] = Field( + description=DATA_DESCRIPTIONS.get("volume", ""), + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/equity_quote.py b/openbb_platform/core/openbb_core/provider/standard_models/equity_quote.py new file mode 100644 index 0000000000000000000000000000000000000000..21c35e0ed16e8e557fe62638dd08f1c76b025c64 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/equity_quote.py @@ -0,0 +1,151 @@ +"""Equity Quote Standard Model.""" + +from datetime import datetime +from typing import List, Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + 
QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class EquityQuoteQueryParams(QueryParams): + """Equity Quote Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class EquityQuoteData(Data): + """Equity Quote Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + asset_type: Optional[str] = Field( + default=None, description="Type of asset - i.e, stock, ETF, etc." + ) + name: Optional[str] = Field( + default=None, description="Name of the company or asset." + ) + exchange: Optional[str] = Field( + default=None, + description="The name or symbol of the venue where the data is from.", + ) + bid: Optional[float] = Field( + default=None, description="Price of the top bid order." + ) + bid_size: Optional[int] = Field( + default=None, + description="This represents the number of round lot orders at the given price." + + " The normal round lot size is 100 shares." + + " A size of 2 means there are 200 shares available at the given price.", + ) + bid_exchange: Optional[str] = Field( + default=None, + description="The specific trading venue where the purchase order was placed.", + ) + ask: Optional[float] = Field( + default=None, description="Price of the top ask order." + ) + ask_size: Optional[int] = Field( + default=None, + description="This represents the number of round lot orders at the given price." + + " The normal round lot size is 100 shares." 
+ + " A size of 2 means there are 200 shares available at the given price.", + ) + ask_exchange: Optional[str] = Field( + default=None, + description="The specific trading venue where the sale order was placed.", + ) + quote_conditions: Optional[Union[str, int, List[str], List[int]]] = Field( + default=None, + description="Conditions or condition codes applicable to the quote.", + ) + quote_indicators: Optional[Union[str, int, List[str], List[int]]] = Field( + default=None, + description="Indicators or indicator codes applicable to the participant" + + " quote related to the price bands for the issue, or the affect the quote has" + + " on the NBBO.", + ) + sales_conditions: Optional[Union[str, int, List[str], List[int]]] = Field( + default=None, + description="Conditions or condition codes applicable to the sale.", + ) + sequence_number: Optional[int] = Field( + default=None, + description="The sequence number represents the sequence in which message events happened." + + " These are increasing and unique per ticker symbol," + + " but will not always be sequential (e.g., 1, 2, 6, 9, 10, 11).", + ) + market_center: Optional[str] = Field( + default=None, + description="The ID of the UTP participant that originated the message.", + ) + participant_timestamp: Optional[datetime] = Field( + default=None, + description="Timestamp for when the quote was generated by the exchange.", + ) + trf_timestamp: Optional[datetime] = Field( + default=None, + description="Timestamp for when the TRF (Trade Reporting Facility) received the message.", + ) + sip_timestamp: Optional[datetime] = Field( + default=None, + description="Timestamp for when the SIP (Security Information Processor)" + + " received the message from the exchange.", + ) + last_price: Optional[float] = Field( + default=None, description="Price of the last trade." + ) + last_tick: Optional[str] = Field( + default=None, description="Whether the last sale was an up or down tick." 
+ ) + last_size: Optional[int] = Field( + default=None, description="Size of the last trade." + ) + last_timestamp: Optional[datetime] = Field( + default=None, description="Date and Time when the last price was recorded." + ) + open: Optional[float] = Field( + default=None, description=DATA_DESCRIPTIONS.get("open", "") + ) + high: Optional[float] = Field( + default=None, description=DATA_DESCRIPTIONS.get("high", "") + ) + low: Optional[float] = Field( + default=None, description=DATA_DESCRIPTIONS.get("low", "") + ) + close: Optional[float] = Field( + default=None, description=DATA_DESCRIPTIONS.get("close", "") + ) + volume: Optional[Union[int, float]] = Field( + default=None, description=DATA_DESCRIPTIONS.get("volume", "") + ) + exchange_volume: Optional[Union[int, float]] = Field( + default=None, + description="Volume of shares exchanged during the trading day on the specific exchange.", + ) + prev_close: Optional[float] = Field( + default=None, description=DATA_DESCRIPTIONS.get("prev_close", "") + ) + change: Optional[float] = Field( + default=None, description="Change in price from previous close." + ) + change_percent: Optional[float] = Field( + default=None, + description="Change in price as a normalized percentage.", + json_schema_extra={"x-frontendmultiply": 100}, + ) + year_high: Optional[float] = Field( + default=None, description="The one year high (52W High)." + ) + year_low: Optional[float] = Field( + default=None, description="The one year low (52W Low)." 
+ ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/equity_screener.py b/openbb_platform/core/openbb_core/provider/standard_models/equity_screener.py new file mode 100644 index 0000000000000000000000000000000000000000..dde3e8f11bc3669ef84818f89ffd25f677cdeaeb --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/equity_screener.py @@ -0,0 +1,19 @@ +"""Equity Screener Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS +from pydantic import Field + + +class EquityScreenerQueryParams(QueryParams): + """Equity Screener Query.""" + + +class EquityScreenerData(Data): + """Equity Screener Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + name: Optional[str] = Field(default=None, description="Name of the company.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/equity_search.py b/openbb_platform/core/openbb_core/provider/standard_models/equity_search.py new file mode 100644 index 0000000000000000000000000000000000000000..43cac51869e9c7693686f6f0afabfea01b251465 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/equity_search.py @@ -0,0 +1,26 @@ +"""Equity Search Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS +from pydantic import Field + + +class EquitySearchQueryParams(QueryParams): + """Equity Search Query.""" + + query: str = Field(description="Search query.", default="") + is_symbol: bool = Field( + description="Whether to search by ticker symbol.", default=False + ) + + +class EquitySearchData(Data): + """Equity Search Data.""" + + symbol: Optional[str] = 
Field( + default=None, description=DATA_DESCRIPTIONS.get("symbol", "") + ) + name: Optional[str] = Field(default=None, description="Name of the company.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/equity_short_interest.py b/openbb_platform/core/openbb_core/provider/standard_models/equity_short_interest.py new file mode 100644 index 0000000000000000000000000000000000000000..0f1679ad1e8cda5cb0e10d75731e5800dae030f3 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/equity_short_interest.py @@ -0,0 +1,79 @@ +"""Equity Short Interest Standard Model.""" + +from datetime import date as dateType + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class ShortInterestQueryParams(QueryParams): + """Equity Short Interest Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + +class ShortInterestData(Data): + """Equity Short Interest Data.""" + + settlement_date: dateType = Field( + description=( + "The mid-month short interest report is based on short positions held by " + "members on the settlement date of the 15th of each month. If the 15th falls " + "on a weekend or another non-settlement date, the designated settlement date " + "will be the previous business day on which transactions settled. The " + "end-of-month short interest report is based on short positions held on the " + "last business day of the month on which transactions settle. Once the short " + "position reports are received, the short interest data is compiled for each " + "equity security and provided for publication on the 7th business day after " + "the reporting settlement date." 
+ ) + ) + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + issue_name: str = Field(description="Unique identifier of the issue.") + market_class: str = Field(description="Primary listing market.") + current_short_position: float = Field( + description=( + "The total number of shares in the issue that are reflected on the books " + "and records of the reporting firms as short as defined by Rule 200 of " + "Regulation SHO as of the current cycle’s designated settlement date." + ) + ) + previous_short_position: float = Field( + description=( + "The total number of shares in the issue that are reflected on the books " + "and records of the reporting firms as short as defined by Rule 200 of " + "Regulation SHO as of the previous cycle’s designated settlement date." + ) + ) + avg_daily_volume: float = Field( + description=( + "Total Volume or Adjusted Volume in case of splits / Total trade days " + "between (previous settlement date + 1) to (current settlement date). The " + "NULL values are translated as zero." + ) + ) + + days_to_cover: float = Field( + description=( + "The number of days of average share volume it would require to buy all of " + "the shares that were sold short during the reporting cycle. Formula: Short " + "Interest / Average Daily Share Volume, Rounded to Hundredths. 1.00 will be " + "displayed for any values equal or less than 1 (i.e., Average Daily Share is " + "equal to or greater than Short Interest). N/A will be displayed If the days " + "to cover is Zero (i.e., Average Daily Share Volume is Zero)." + ) + ) + change: float = Field( + description=( + "Change in Shares Short from Previous Cycle: Difference in short interest " + "between the current cycle and the previous cycle." + ) + ) + change_pct: float = Field( + description="Change in Shares Short from Previous Cycle as a percent." 
+ ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/equity_valuation_multiples.py b/openbb_platform/core/openbb_core/provider/standard_models/equity_valuation_multiples.py new file mode 100644 index 0000000000000000000000000000000000000000..5f9d364accf09f7b19c24aab042fb27417130f5f --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/equity_valuation_multiples.py @@ -0,0 +1,262 @@ +"""Equity Valuation Multiples Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class EquityValuationMultiplesQueryParams(QueryParams): + """Equity Valuation Multiples Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class EquityValuationMultiplesData(Data): + """Equity Valuation Multiples Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + revenue_per_share_ttm: Optional[float] = Field( + default=None, + description="Revenue per share calculated as trailing twelve months.", + ) + net_income_per_share_ttm: Optional[float] = Field( + default=None, + description="Net income per share calculated as trailing twelve months.", + ) + operating_cash_flow_per_share_ttm: Optional[float] = Field( + default=None, + description="Operating cash flow per share calculated as trailing twelve months.", + ) + free_cash_flow_per_share_ttm: Optional[float] = Field( + default=None, + description="Free cash flow per share calculated as trailing twelve months.", + ) + cash_per_share_ttm: Optional[float] = Field( + default=None, description="Cash per share 
calculated as trailing twelve months." + ) + book_value_per_share_ttm: Optional[float] = Field( + default=None, + description="Book value per share calculated as trailing twelve months.", + ) + tangible_book_value_per_share_ttm: Optional[float] = Field( + default=None, + description="Tangible book value per share calculated as trailing twelve months.", + ) + shareholders_equity_per_share_ttm: Optional[float] = Field( + default=None, + description="Shareholders equity per share calculated as trailing twelve months.", + ) + interest_debt_per_share_ttm: Optional[float] = Field( + default=None, + description="Interest debt per share calculated as trailing twelve months.", + ) + market_cap_ttm: Optional[float] = Field( + default=None, + description="Market capitalization calculated as trailing twelve months.", + ) + enterprise_value_ttm: Optional[float] = Field( + default=None, + description="Enterprise value calculated as trailing twelve months.", + ) + pe_ratio_ttm: Optional[float] = Field( + default=None, + description="Price-to-earnings ratio (P/E ratio) calculated as trailing twelve months.", + ) + price_to_sales_ratio_ttm: Optional[float] = Field( + default=None, + description="Price-to-sales ratio calculated as trailing twelve months.", + ) + pocf_ratio_ttm: Optional[float] = Field( + default=None, + description="Price-to-operating cash flow ratio calculated as trailing twelve months.", + ) + pfcf_ratio_ttm: Optional[float] = Field( + default=None, + description="Price-to-free cash flow ratio calculated as trailing twelve months.", + ) + pb_ratio_ttm: Optional[float] = Field( + default=None, + description="Price-to-book ratio calculated as trailing twelve months.", + ) + ptb_ratio_ttm: Optional[float] = Field( + default=None, + description="Price-to-tangible book ratio calculated as trailing twelve months.", + ) + ev_to_sales_ttm: Optional[float] = Field( + default=None, + description="Enterprise value-to-sales ratio calculated as trailing twelve months.", + ) + 
enterprise_value_over_ebitda_ttm: Optional[float] = Field( + default=None, + description="Enterprise value-to-EBITDA ratio calculated as trailing twelve months.", + ) + ev_to_operating_cash_flow_ttm: Optional[float] = Field( + default=None, + description="Enterprise value-to-operating cash flow ratio calculated as trailing twelve months.", + ) + ev_to_free_cash_flow_ttm: Optional[float] = Field( + default=None, + description="Enterprise value-to-free cash flow ratio calculated as trailing twelve months.", + ) + earnings_yield_ttm: Optional[float] = Field( + default=None, description="Earnings yield calculated as trailing twelve months." + ) + free_cash_flow_yield_ttm: Optional[float] = Field( + default=None, + description="Free cash flow yield calculated as trailing twelve months.", + ) + debt_to_equity_ttm: Optional[float] = Field( + default=None, + description="Debt-to-equity ratio calculated as trailing twelve months.", + ) + debt_to_assets_ttm: Optional[float] = Field( + default=None, + description="Debt-to-assets ratio calculated as trailing twelve months.", + ) + net_debt_to_ebitda_ttm: Optional[float] = Field( + default=None, + description="Net debt-to-EBITDA ratio calculated as trailing twelve months.", + ) + current_ratio_ttm: Optional[float] = Field( + default=None, description="Current ratio calculated as trailing twelve months." + ) + interest_coverage_ttm: Optional[float] = Field( + default=None, + description="Interest coverage calculated as trailing twelve months.", + ) + income_quality_ttm: Optional[float] = Field( + default=None, description="Income quality calculated as trailing twelve months." + ) + dividend_yield_ttm: Optional[float] = Field( + default=None, description="Dividend yield calculated as trailing twelve months." 
+ ) + dividend_yield_percentage_ttm: Optional[float] = Field( + default=None, + description="Dividend yield percentage calculated as trailing twelve months.", + ) + dividend_to_market_cap_ttm: Optional[float] = Field( + default=None, + description="Dividend to market capitalization ratio calculated as trailing twelve months.", + ) + dividend_per_share_ttm: Optional[float] = Field( + default=None, + description="Dividend per share calculated as trailing twelve months.", + ) + payout_ratio_ttm: Optional[float] = Field( + default=None, description="Payout ratio calculated as trailing twelve months." + ) + sales_general_and_administrative_to_revenue_ttm: Optional[float] = Field( + default=None, + description="Sales general and administrative expenses-to-revenue ratio calculated as trailing twelve months.", + ) + research_and_development_to_revenue_ttm: Optional[float] = Field( + default=None, + description="Research and development expenses-to-revenue ratio calculated as trailing twelve months.", + ) + intangibles_to_total_assets_ttm: Optional[float] = Field( + default=None, + description="Intangibles-to-total assets ratio calculated as trailing twelve months.", + ) + capex_to_operating_cash_flow_ttm: Optional[float] = Field( + default=None, + description="Capital expenditures-to-operating cash flow ratio calculated as trailing twelve months.", + ) + capex_to_revenue_ttm: Optional[float] = Field( + default=None, + description="Capital expenditures-to-revenue ratio calculated as trailing twelve months.", + ) + capex_to_depreciation_ttm: Optional[float] = Field( + default=None, + description="Capital expenditures-to-depreciation ratio calculated as trailing twelve months.", + ) + stock_based_compensation_to_revenue_ttm: Optional[float] = Field( + default=None, + description="Stock-based compensation-to-revenue ratio calculated as trailing twelve months.", + ) + graham_number_ttm: Optional[float] = Field( + default=None, description="Graham number calculated as trailing 
twelve months." + ) + roic_ttm: Optional[float] = Field( + default=None, + description="Return on invested capital calculated as trailing twelve months.", + ) + return_on_tangible_assets_ttm: Optional[float] = Field( + default=None, + description="Return on tangible assets calculated as trailing twelve months.", + ) + graham_net_net_ttm: Optional[float] = Field( + default=None, + description="Graham net-net working capital calculated as trailing twelve months.", + ) + working_capital_ttm: Optional[float] = Field( + default=None, + description="Working capital calculated as trailing twelve months.", + ) + tangible_asset_value_ttm: Optional[float] = Field( + default=None, + description="Tangible asset value calculated as trailing twelve months.", + ) + net_current_asset_value_ttm: Optional[float] = Field( + default=None, + description="Net current asset value calculated as trailing twelve months.", + ) + invested_capital_ttm: Optional[float] = Field( + default=None, + description="Invested capital calculated as trailing twelve months.", + ) + average_receivables_ttm: Optional[float] = Field( + default=None, + description="Average receivables calculated as trailing twelve months.", + ) + average_payables_ttm: Optional[float] = Field( + default=None, + description="Average payables calculated as trailing twelve months.", + ) + average_inventory_ttm: Optional[float] = Field( + default=None, + description="Average inventory calculated as trailing twelve months.", + ) + days_sales_outstanding_ttm: Optional[float] = Field( + default=None, + description="Days sales outstanding calculated as trailing twelve months.", + ) + days_payables_outstanding_ttm: Optional[float] = Field( + default=None, + description="Days payables outstanding calculated as trailing twelve months.", + ) + days_of_inventory_on_hand_ttm: Optional[float] = Field( + default=None, + description="Days of inventory on hand calculated as trailing twelve months.", + ) + receivables_turnover_ttm: 
Optional[float] = Field( + default=None, + description="Receivables turnover calculated as trailing twelve months.", + ) + payables_turnover_ttm: Optional[float] = Field( + default=None, + description="Payables turnover calculated as trailing twelve months.", + ) + inventory_turnover_ttm: Optional[float] = Field( + default=None, + description="Inventory turnover calculated as trailing twelve months.", + ) + roe_ttm: Optional[float] = Field( + default=None, + description="Return on equity calculated as trailing twelve months.", + ) + capex_per_share_ttm: Optional[float] = Field( + default=None, + description="Capital expenditures per share calculated as trailing twelve months.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/esg_risk_rating.py b/openbb_platform/core/openbb_core/provider/standard_models/esg_risk_rating.py new file mode 100644 index 0000000000000000000000000000000000000000..57c869e4c578cde847ad9deda5c712db072e7535 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/esg_risk_rating.py @@ -0,0 +1,45 @@ +"""ESG Risk Rating Standard Model.""" + +from typing import List, Literal, Set, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class ESGRiskRatingQueryParams(QueryParams): + """ESG Risk Rating Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class ESGRiskRatingData(Data): + """ESG Risk Rating Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + cik: str = Field(description=DATA_DESCRIPTIONS.get("cik", "")) + company_name: str = 
Field(description="Company name of the company.") + industry: str = Field(description="Industry of the company.") + year: int = Field(description="Year of the ESG risk rating.") + esg_risk_rating: Literal[ + "A+", "A", "A-", "B+", "B", "B-", "C+", "C", "C-", "D+", "D", "D-", "F" + ] = Field(description="ESG risk rating of the company.") + industry_rank: str = Field(description="Industry rank of the company.") + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: Union[str, List[str], Set[str]]): + """Convert field to uppercase.""" + if isinstance(v, str): + return v.upper() + return ",".join([symbol.upper() for symbol in list(v)]) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/esg_score.py b/openbb_platform/core/openbb_core/provider/standard_models/esg_score.py new file mode 100644 index 0000000000000000000000000000000000000000..680f2a7a04df8b192900c3f57ae21bb877af8bf5 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/esg_score.py @@ -0,0 +1,53 @@ +"""ESG Score Standard Model.""" + +from datetime import ( + date as dateType, + datetime, +) +from typing import List, Set, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class ESGScoreQueryParams(QueryParams): + """ESG Score Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class ESGScoreData(Data): + """ESG Score Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + cik: str = Field(description=DATA_DESCRIPTIONS.get("cik", "")) + company_name: str = 
Field(description="Company name of the company.") + form_type: str = Field(description="Form type of the company.") + accepted_date: datetime = Field(description="Accepted date of the company.") + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + environmental_score: float = Field( + description="Environmental score of the company." + ) + social_score: float = Field(description="Social score of the company.") + governance_score: float = Field(description="Governance score of the company.") + esg_score: float = Field(description="ESG score of the company.") + url: str = Field(description="URL of the company.") + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: Union[str, List[str], Set[str]]): + """Convert field to uppercase.""" + if isinstance(v, str): + return v.upper() + return ",".join([symbol.upper() for symbol in list(v)]) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/esg_sector.py b/openbb_platform/core/openbb_core/provider/standard_models/esg_sector.py new file mode 100644 index 0000000000000000000000000000000000000000..1d3d39a45e9a10fc28a52f7d44a2261c0e06e828 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/esg_sector.py @@ -0,0 +1,43 @@ +"""ESG Sector Standard Model.""" + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams + + +class ESGSectorQueryParams(QueryParams): + """ESG Sector Query. + + Parameter + --------- + year : int + The year to get ESG information for + """ + + year: int + + +class ESGSectorData(Data): + """ESG Sector Data. + + Returns + ------- + year : int + The year of the ESG Sector. + sector : str + The sector of the ESG Sector. + environmental_score : float + The environmental score of the ESG Sector. + social_score : float + The social score of the ESG Sector. + governance_score : float + The governance score of the ESG Sector. 
+ esg_score : float + The ESG score of the ESG Sector. + """ + + year: int + sector: str + environmental_score: float + social_score: float + governance_score: float + esg_score: float diff --git a/openbb_platform/core/openbb_core/provider/standard_models/etf_countries.py b/openbb_platform/core/openbb_core/provider/standard_models/etf_countries.py new file mode 100644 index 0000000000000000000000000000000000000000..e6af032eb9d009cd7d0bd01cec714abd7b4c12d6 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/etf_countries.py @@ -0,0 +1,26 @@ +"""ETF Countries Standard Model.""" + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import QUERY_DESCRIPTIONS +from pydantic import Field, field_validator + + +class EtfCountriesQueryParams(QueryParams): + """ETF Countries Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "") + " (ETF)") + + @field_validator("symbol") + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class EtfCountriesData(Data): + """ETF Countries Data.""" + + country: str = Field( + description="The country of the exposure. Corresponding values are normalized percentage points." 
+ ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/etf_equity_exposure.py b/openbb_platform/core/openbb_core/provider/standard_models/etf_equity_exposure.py new file mode 100644 index 0000000000000000000000000000000000000000..36ae009c246358a636b16e21538f0e0660b7ba6b --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/etf_equity_exposure.py @@ -0,0 +1,42 @@ +"""ETF Equity Exposure Standard Model.""" + +from typing import Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import QUERY_DESCRIPTIONS +from pydantic import Field, field_validator + + +class EtfEquityExposureQueryParams(QueryParams): + """ETF Equity Exposure Query Params.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "") + " (Stock)") + + @field_validator("symbol") + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class EtfEquityExposureData(Data): + """ETF Equity Exposure Data.""" + + equity_symbol: str = Field(description="The symbol of the equity requested.") + etf_symbol: str = Field( + description="The symbol of the ETF with exposure to the requested equity." 
+ ) + shares: Optional[float] = Field( + default=None, + description="The number of shares held in the ETF.", + ) + weight: Optional[float] = Field( + default=None, + description="The weight of the equity in the ETF, as a normalized percent.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + market_value: Optional[Union[int, float]] = Field( + default=None, + description="The market value of the equity position in the ETF.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/etf_historical.py b/openbb_platform/core/openbb_core/provider/standard_models/etf_historical.py new file mode 100644 index 0000000000000000000000000000000000000000..5c5300e0bd5c3532406785338b64fa3b5defbfed --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/etf_historical.py @@ -0,0 +1,58 @@ +"""ETF Historical Price Standard Model.""" + +from datetime import ( + date as dateType, + datetime, +) +from typing import Optional, Union + +from dateutil import parser +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, NonNegativeInt, PositiveFloat, field_validator + + +class EtfHistoricalQueryParams(QueryParams): + """ETF Historical Price Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "") + " (ETF)") + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase and remove '-'.""" + return v.upper() + + +class EtfHistoricalData(Data): + """ETF Historical Price Data.""" + + 
date: Union[dateType, datetime] = Field( + description=DATA_DESCRIPTIONS.get("date", "") + ) + open: PositiveFloat = Field(description=DATA_DESCRIPTIONS.get("open", "")) + high: PositiveFloat = Field(description=DATA_DESCRIPTIONS.get("high", "")) + low: PositiveFloat = Field(description=DATA_DESCRIPTIONS.get("low", "")) + close: PositiveFloat = Field(description=DATA_DESCRIPTIONS.get("close", "")) + volume: Optional[NonNegativeInt] = Field( + description=DATA_DESCRIPTIONS.get("volume", "") + ) + + @field_validator("date", mode="before", check_fields=False) + def date_validate(cls, v): # pylint: disable=E0213 + """Return formatted datetime.""" + if ":" in str(v): + return parser.isoparse(str(v)) + return parser.parse(str(v)).date() diff --git a/openbb_platform/core/openbb_core/provider/standard_models/etf_historical_nav.py b/openbb_platform/core/openbb_core/provider/standard_models/etf_historical_nav.py new file mode 100644 index 0000000000000000000000000000000000000000..ae2425cd560fa1f9f455bb0a52032d58f5db98db --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/etf_historical_nav.py @@ -0,0 +1,30 @@ +"""ETF Historical NAV model.""" + +from datetime import date as dateType + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class EtfHistoricalNavQueryParams(QueryParams): + """ETF Historical NAV Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol") + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class EtfHistoricalNavData(Data): + """ETF Historical NAV Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + nav: float = Field(description="The net asset value on the date.") 
diff --git a/openbb_platform/core/openbb_core/provider/standard_models/etf_holdings.py b/openbb_platform/core/openbb_core/provider/standard_models/etf_holdings.py new file mode 100644 index 0000000000000000000000000000000000000000..b591f769273ec44cdbf9c1ed2d1c0131a90c7da8 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/etf_holdings.py @@ -0,0 +1,35 @@ +"""ETF Holdings Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class EtfHoldingsQueryParams(QueryParams): + """ETF Holdings Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "") + " (ETF)") + + @field_validator("symbol") + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class EtfHoldingsData(Data): + """ETF Holdings Data.""" + + symbol: Optional[str] = Field( + default=None, description=DATA_DESCRIPTIONS.get("symbol", "") + " (ETF)" + ) + name: Optional[str] = Field( + default=None, + description="Name of the ETF holding.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/etf_holdings_date.py b/openbb_platform/core/openbb_core/provider/standard_models/etf_holdings_date.py new file mode 100644 index 0000000000000000000000000000000000000000..f85786332319156b4af4b32e9f97c4ac44ec049e --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/etf_holdings_date.py @@ -0,0 +1,23 @@ +"""ETF Holdings Date Standard Model.""" + +from datetime import date as dateType + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from 
pydantic import Field + + +class EtfHoldingsDateQueryParams(QueryParams): + """ETF Holdings Date Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "") + " (ETF)") + + +class EtfHoldingsDateData(Data): + """ETF Holdings Date Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date")) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/etf_info.py b/openbb_platform/core/openbb_core/provider/standard_models/etf_info.py new file mode 100644 index 0000000000000000000000000000000000000000..ccc149c1919f9c483ce7762eac168cc2b4035a3e --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/etf_info.py @@ -0,0 +1,34 @@ +"""ETF Info Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class EtfInfoQueryParams(QueryParams): + """ETF Info Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "") + " (ETF)") + + @field_validator("symbol") + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class EtfInfoData(Data): + """ETF Info Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "") + " (ETF)") + name: Optional[str] = Field(description="Name of the ETF.") + description: Optional[str] = Field( + default=None, description="Description of the fund." 
+ ) + inception_date: Optional[str] = Field(description="Inception date of the ETF.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/etf_performance.py b/openbb_platform/core/openbb_core/provider/standard_models/etf_performance.py new file mode 100644 index 0000000000000000000000000000000000000000..ad048377582d5a4383308b89b12fb3161987958a --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/etf_performance.py @@ -0,0 +1,57 @@ +"""ETF Performance Standard Model.""" + +from datetime import date as dateType +from typing import Literal, Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class ETFPerformanceQueryParams(QueryParams): + """ETF Performance Query.""" + + sort: Literal["asc", "desc"] = Field( + default="desc", + description="Sort order. Possible values: 'asc', 'desc'. 
Default: 'desc'.", + ) + limit: int = Field( + default=10, + description=QUERY_DESCRIPTIONS.get("limit", ""), + ) + + @field_validator("sort", mode="before", check_fields=False) + @classmethod + def to_lower(cls, v: Optional[str]) -> Optional[str]: + """Convert field to lowercase.""" + return v.lower() if v else v + + +class ETFPerformanceData(Data): + """ETF Performance Data.""" + + symbol: str = Field( + description=DATA_DESCRIPTIONS.get("symbol", ""), + ) + name: str = Field( + description="Name of the entity.", + ) + last_price: float = Field( + description="Last price.", + ) + percent_change: float = Field( + description="Percent change.", + ) + net_change: float = Field( + description="Net change.", + ) + volume: float = Field( + description=DATA_DESCRIPTIONS.get("volume", ""), + ) + date: dateType = Field( + description=DATA_DESCRIPTIONS.get("date", ""), + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/etf_search.py b/openbb_platform/core/openbb_core/provider/standard_models/etf_search.py new file mode 100644 index 0000000000000000000000000000000000000000..e0c217f4c171cb15bb783abaa69757cb308275bf --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/etf_search.py @@ -0,0 +1,21 @@ +"""ETF Search Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS +from pydantic import Field + + +class EtfSearchQueryParams(QueryParams): + """ETF Search Query.""" + + query: Optional[str] = Field(description="Search query.", default="") + + +class EtfSearchData(Data): + """ETF Search Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "") + "(ETF)") + name: Optional[str] = Field(description="Name of the ETF.", default=None) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/etf_sectors.py 
b/openbb_platform/core/openbb_core/provider/standard_models/etf_sectors.py new file mode 100644 index 0000000000000000000000000000000000000000..ef11759e3d8af400917c5e3628d6148b3e61d2e7 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/etf_sectors.py @@ -0,0 +1,29 @@ +"""ETF Sectors Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import QUERY_DESCRIPTIONS +from pydantic import Field, field_validator + + +class EtfSectorsQueryParams(QueryParams): + """ETF Sectors Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "") + " (ETF)") + + @field_validator("symbol") + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class EtfSectorsData(Data): + """ETF Sectors Data.""" + + sector: str = Field(description="Sector of exposure.") + weight: Optional[float] = Field( + description="Exposure of the ETF to the sector in normalized percentage points." 
+ ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/euro_short_term_rate.py b/openbb_platform/core/openbb_core/provider/standard_models/euro_short_term_rate.py new file mode 100644 index 0000000000000000000000000000000000000000..d028f289fcdda739725e309d68db4c4627e24a95 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/euro_short_term_rate.py @@ -0,0 +1,66 @@ +"""Euro Short Term Rate Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class EuroShortTermRateQueryParams(QueryParams): + """Euro Short Term Rate Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + + +class EuroShortTermRateData(Data): + """Euro Short Term Rate Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + rate: float = Field( + description="Volume-weighted trimmed mean rate.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + percentile_25: Optional[float] = Field( + default=None, + description="Rate at 25th percentile of volume.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + percentile_75: Optional[float] = Field( + default=None, + description="Rate at 75th percentile of volume.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + volume: Optional[float] = Field( + default=None, + description=DATA_DESCRIPTIONS.get("volume", "") + " (Millions of €EUR).", + json_schema_extra={ + "x-unit_measurement": "currency", + 
"x-frontend_multiply": 1e6, + }, + ) + transactions: Optional[int] = Field( + default=None, + description="Number of transactions.", + ) + number_of_banks: Optional[int] = Field( + default=None, + description="Number of active banks.", + ) + large_bank_share_of_volume: Optional[float] = Field( + default=None, + description="The percent of volume attributable to the 5 largest active banks.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/executive_compensation.py b/openbb_platform/core/openbb_core/provider/standard_models/executive_compensation.py new file mode 100644 index 0000000000000000000000000000000000000000..9e871bdf14f0e670ef319369cd6582f860dc4f94 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/executive_compensation.py @@ -0,0 +1,58 @@ +"""Executive Compensation Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, NonNegativeFloat, field_validator + + +class ExecutiveCompensationQueryParams(QueryParams): + """Executive Compensation Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class ExecutiveCompensationData(Data): + """Executive Compensation Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + cik: Optional[str] = Field( + default=None, description=DATA_DESCRIPTIONS.get("cik", "") + ) + company_name: Optional[str] = Field( + default=None, description="The name of the company." 
+ ) + industry: Optional[str] = Field( + default=None, description="The industry of the company." + ) + year: Optional[int] = Field(default=None, description="Year of the compensation.") + name_and_position: Optional[str] = Field( + default=None, description="Name and position." + ) + salary: Optional[NonNegativeFloat] = Field(default=None, description="Salary.") + bonus: Optional[NonNegativeFloat] = Field( + default=None, description="Bonus payments." + ) + stock_award: Optional[NonNegativeFloat] = Field( + default=None, description="Stock awards." + ) + incentive_plan_compensation: Optional[NonNegativeFloat] = Field( + default=None, description="Incentive plan compensation." + ) + all_other_compensation: Optional[NonNegativeFloat] = Field( + default=None, description="All other compensation." + ) + total: Optional[NonNegativeFloat] = Field( + default=None, description="Total compensation." + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/export_destinations.py b/openbb_platform/core/openbb_core/provider/standard_models/export_destinations.py new file mode 100644 index 0000000000000000000000000000000000000000..7fa1da1390d9c52d5d9a99c117ee578944c95ab0 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/export_destinations.py @@ -0,0 +1,29 @@ +"""Export Destinations Standard Model.""" + +from typing import Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import QUERY_DESCRIPTIONS +from pydantic import Field + + +class ExportDestinationsQueryParams(QueryParams): + """Export Destinations Query.""" + + country: str = Field(description=QUERY_DESCRIPTIONS.get("country", "")) + + +class ExportDestinationsData(Data): + """Export Destinations Data.""" + + origin_country: str = Field( + description="The country of origin.", + ) + destination_country: str = Field( + description="The destination 
country.", + ) + value: Union[float, int] = Field( + description="The value of the export.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/fed_projections.py b/openbb_platform/core/openbb_core/provider/standard_models/fed_projections.py new file mode 100644 index 0000000000000000000000000000000000000000..b87c9e72632778ef926ea706543931d83262ec4a --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/fed_projections.py @@ -0,0 +1,32 @@ +"""PROJECTION Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS +from pydantic import Field + + +class PROJECTIONQueryParams(QueryParams): + """PROJECTION Query.""" + + +class PROJECTIONData(Data): + """PROJECTION Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + range_high: Optional[float] = Field(description="High projection of rates.") + central_tendency_high: Optional[float] = Field( + description="Central tendency of high projection of rates." + ) + median: Optional[float] = Field(description="Median projection of rates.") + range_midpoint: Optional[float] = Field(description="Midpoint projection of rates.") + central_tendency_midpoint: Optional[float] = Field( + description="Central tendency of midpoint projection of rates." + ) + range_low: Optional[float] = Field(description="Low projection of rates.") + central_tendency_low: Optional[float] = Field( + description="Central tendency of low projection of rates." 
+ ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/federal_funds_rate.py b/openbb_platform/core/openbb_core/provider/standard_models/federal_funds_rate.py new file mode 100644 index 0000000000000000000000000000000000000000..06e04df8033077602ea1ce9b966508abc3835f33 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/federal_funds_rate.py @@ -0,0 +1,74 @@ +"""Federal Funds Rate Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class FederalFundsRateQueryParams(QueryParams): + """Federal Funds Rate Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + + +class FederalFundsRateData(Data): + """Federal Funds Rate Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + rate: float = Field( + description="Effective federal funds rate.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + target_range_upper: Optional[float] = Field( + default=None, + description="Upper bound of the target range.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + target_range_lower: Optional[float] = Field( + default=None, + description="Lower bound of the target range.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + percentile_1: Optional[float] = Field( + default=None, + description="1st percentile of the distribution.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + 
percentile_25: Optional[float] = Field( + default=None, + description="25th percentile of the distribution.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + percentile_75: Optional[float] = Field( + default=None, + description="75th percentile of the distribution.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + percentile_99: Optional[float] = Field( + default=None, + description="99th percentile of the distribution.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + volume: Optional[float] = Field( + default=None, + description=DATA_DESCRIPTIONS.get("volume", "") + + "The notional volume of transactions (Billions of $).", + json_schema_extra={ + "x-unit_measurement": "currency", + "x-frontend_multiply": 1e9, + }, + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/ffrmc.py b/openbb_platform/core/openbb_core/provider/standard_models/ffrmc.py new file mode 100644 index 0000000000000000000000000000000000000000..203d34a7f45ada5d2e2f2dd5b15f75ac64e35451 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/ffrmc.py @@ -0,0 +1,46 @@ +"""Selected Treasury Constant Maturity Standard Model.""" + +from datetime import ( + date as dateType, +) +from typing import Literal, Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class SelectedTreasuryConstantMaturityQueryParams(QueryParams): + """Selected Treasury Constant Maturity Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + maturity: 
Optional[Literal["10y", "5y", "1y", "6m", "3m"]] = Field( + default="10y", + description="The maturity", + ) + + @field_validator("maturity", mode="before", check_fields=False) + @classmethod + def to_lower(cls, v: Optional[str]) -> Optional[str]: + """Convert field to lowercase.""" + return v.lower() if v else v + + +class SelectedTreasuryConstantMaturityData(Data): + """Selected Treasury Constant Maturity Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + rate: Optional[float] = Field( + description="Selected Treasury Constant Maturity Rate." + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/financial_attributes.py b/openbb_platform/core/openbb_core/provider/standard_models/financial_attributes.py new file mode 100644 index 0000000000000000000000000000000000000000..067a682b6bd9964e7fce4cf0eacd56b5e522e265 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/financial_attributes.py @@ -0,0 +1,50 @@ +"""Financial Attributes Standard Model.""" + +from datetime import date as dateType +from typing import Literal, Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class FinancialAttributesQueryParams(QueryParams): + """Financial Attributes Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol")) + tag: str = Field(description=QUERY_DESCRIPTIONS.get("tag")) + period: Optional[Literal["annual", "quarter"]] = Field( + default="annual", description=QUERY_DESCRIPTIONS.get("period") + ) + limit: Optional[int] = Field( + default=1000, description=QUERY_DESCRIPTIONS.get("limit") + ) + type: Optional[str] = Field( + default=None, description="Filter by type, when applicable." 
+ ) + start_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("start_date") + ) + end_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date") + ) + sort: Optional[Literal["asc", "desc"]] = Field( + default="desc", description="Sort order." + ) + + @field_validator("period", "sort", mode="before", check_fields=False) + @classmethod + def to_lower(cls, v: Optional[str]) -> Optional[str]: + """Convert field to lowercase.""" + return v.lower() if v else v + + +class FinancialAttributesData(Data): + """Financial Attributes Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date")) + value: Optional[float] = Field(default=None, description="The value of the data.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/financial_ratios.py b/openbb_platform/core/openbb_core/provider/standard_models/financial_ratios.py new file mode 100644 index 0000000000000000000000000000000000000000..fc19cb9efe446fb9722296100763169d7435ff76 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/financial_ratios.py @@ -0,0 +1,34 @@ +"""Financial Ratios Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, NonNegativeInt, field_validator + + +class FinancialRatiosQueryParams(QueryParams): + """Financial Ratios Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + limit: NonNegativeInt = Field( + default=12, description=QUERY_DESCRIPTIONS.get("limit", "") + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str): + """Convert field to uppercase.""" + return v.upper() + + +class FinancialRatiosData(Data): + """Financial Ratios 
Standard Model.""" + + period_ending: str = Field(description=DATA_DESCRIPTIONS.get("date", "")) + fiscal_period: str = Field(description="Period of the financial ratios.") + fiscal_year: Optional[int] = Field(default=None, description="Fiscal year.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/form_13FHR.py b/openbb_platform/core/openbb_core/provider/standard_models/form_13FHR.py new file mode 100644 index 0000000000000000000000000000000000000000..585fedf08530cd00d37874e60a152ab697d5c92a --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/form_13FHR.py @@ -0,0 +1,109 @@ +"""Form 13F-HR Standard Model.""" + +from datetime import date as dateType +from typing import Literal, Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class Form13FHRQueryParams(QueryParams): + """Form 13F-HR Query.""" + + symbol: str = Field( + description=QUERY_DESCRIPTIONS.get("symbol", "") + + " A CIK or Symbol can be used." + ) + date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("date", "") + + " The date represents the end of the reporting period." + + " All form 13F-HR filings are based on the calendar year" + + " and are reported quarterly." + + " If a date is not supplied, the most recent filing is returned." + + " Submissions beginning 2013-06-30 are supported.", + ) + limit: Optional[int] = Field( + default=1, + description=QUERY_DESCRIPTIONS.get("limit", "") + + " The number of previous filings to return." 
+ + " The date parameter takes priority over this parameter.", + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str): + """Convert field to uppercase.""" + return str(v).upper() + + +class Form13FHRData(Data): + """ + Form 13F-HR Data. + + Detailed documentation of the filing can be found here: + https://www.sec.gov/pdf/form13f.pdf + """ + + period_ending: dateType = Field( + description="The end-of-quarter date of the filing." + ) + issuer: str = Field(description="The name of the issuer.") + cusip: str = Field(description="The CUSIP of the security.") + asset_class: str = Field( + description="The title of the asset class for the security." + ) + security_type: Optional[Literal["SH", "PRN"]] = Field( + default=None, + description="Whether the principal amount represents the number of shares" + + " or the principal amount of such class." + + " 'SH' for shares. 'PRN' for principal amount." + + " Convertible debt securities are reported as 'PRN'.", + ) + option_type: Optional[Literal["call", "put"]] = Field( + default=None, + description="Defined when the holdings being reported are put or call options." + + " Only long positions are reported.", + ) + investment_discretion: Optional[str] = Field( + default=None, + description="The investment discretion held by the Manager." 
+ + " Sole, shared-defined (DFN), or shared-other (OTR).", + ) + voting_authority_sole: Optional[int] = Field( + default=None, + description="The number of shares for which the Manager" + + " exercises sole voting authority.", + ) + voting_authority_shared: Optional[int] = Field( + default=None, + description="The number of shares for which the Manager" + + " exercises a defined shared voting authority.", + ) + voting_authority_none: Optional[int] = Field( + default=None, + description="The number of shares for which the Manager" + + " exercises no voting authority.", + ) + principal_amount: int = Field( + description="The total number of shares of the class of security" + + " or the principal amount of such class. Defined by the 'security_type'." + + " Only long positions are reported" + ) + value: int = Field( + description="The fair market value of the holding of the particular class of security." + + " The value reported for options is the fair market value of the underlying security" + + " with respect to the number of shares controlled." 
+ + " Values are rounded to the nearest US dollar" + + " and use the closing price of the last trading day of the calendar year or quarter.", + ) + + @field_validator("option_type", mode="before", check_fields=False) + @classmethod + def validate_option_type(cls, v: str): + """Validate and convert to lower case.""" + return v.lower() if v else None diff --git a/openbb_platform/core/openbb_core/provider/standard_models/forward_ebitda_estimates.py b/openbb_platform/core/openbb_core/provider/standard_models/forward_ebitda_estimates.py new file mode 100644 index 0000000000000000000000000000000000000000..478b39e32fc8f86ad567045382659515ba13bc11 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/forward_ebitda_estimates.py @@ -0,0 +1,74 @@ +"""Forward EBITDA Estimates Standard Model.""" + +from datetime import date as dateType +from typing import Optional, Union + +from openbb_core.provider.abstract.data import Data, ForceInt +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class ForwardEbitdaEstimatesQueryParams(QueryParams): + """Forward EBITDA Estimates Query Parameters.""" + + symbol: Optional[str] = Field( + default=None, + description=QUERY_DESCRIPTIONS["symbol"], + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v): + """Convert field to uppercase.""" + return v.upper() if v else None + + +class ForwardEbitdaEstimatesData(Data): + """Forward EBITDA Estimates Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + name: Optional[str] = Field(default=None, description="Name of the entity.") + last_updated: Optional[dateType] = Field( + default=None, + description="The date of the last update.", + ) + period_ending: Optional[dateType] = Field( + default=None, + description="The end date of 
the reporting period.", + ) + fiscal_year: Optional[int] = Field( + default=None, description="Fiscal year for the estimate." + ) + fiscal_period: Optional[str] = Field( + default=None, description="Fiscal quarter for the estimate." + ) + calendar_year: Optional[int] = Field( + default=None, description="Calendar year for the estimate." + ) + calendar_period: Optional[Union[int, str]] = Field( + default=None, description="Calendar quarter for the estimate." + ) + low_estimate: Optional[ForceInt] = Field( + default=None, description="The EBITDA estimate low for the period." + ) + high_estimate: Optional[ForceInt] = Field( + default=None, description="The EBITDA estimate high for the period." + ) + mean: Optional[ForceInt] = Field( + default=None, description="The EBITDA estimate mean for the period." + ) + median: Optional[ForceInt] = Field( + default=None, description="The EBITDA estimate median for the period." + ) + standard_deviation: Optional[ForceInt] = Field( + default=None, + description="The EBITDA estimate standard deviation for the period.", + ) + number_of_analysts: Optional[int] = Field( + default=None, + description="Number of analysts providing estimates for the period.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/forward_eps_estimates.py b/openbb_platform/core/openbb_core/provider/standard_models/forward_eps_estimates.py new file mode 100644 index 0000000000000000000000000000000000000000..3988f267513f7e25de9e2c858e81165627423144 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/forward_eps_estimates.py @@ -0,0 +1,66 @@ +"""Forward EPS Estimates Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, 
field_validator + + +class ForwardEpsEstimatesQueryParams(QueryParams): + """Forward EPS Estimates Query Parameters.""" + + symbol: Optional[str] = Field( + default=None, + description=QUERY_DESCRIPTIONS["symbol"], + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v): + """Convert field to uppercase.""" + return v.upper() if v else None + + +class ForwardEpsEstimatesData(Data): + """Forward EPS Estimates Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + name: Optional[str] = Field(default=None, description="Name of the entity.") + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + fiscal_year: Optional[int] = Field( + default=None, description="Fiscal year for the estimate." + ) + fiscal_period: Optional[str] = Field( + default=None, description="Fiscal quarter for the estimate." + ) + calendar_year: Optional[int] = Field( + default=None, description="Calendar year for the estimate." + ) + calendar_period: Optional[str] = Field( + default=None, description="Calendar quarter for the estimate." + ) + low_estimate: Optional[float] = Field( + default=None, description="Estimated EPS low for the period." + ) + high_estimate: Optional[float] = Field( + default=None, description="Estimated EPS high for the period." + ) + mean: Optional[float] = Field( + default=None, description="Estimated EPS mean for the period." + ) + median: Optional[float] = Field( + default=None, description="Estimated EPS median for the period." + ) + standard_deviation: Optional[float] = Field( + default=None, description="Estimated EPS standard deviation for the period." 
+ ) + number_of_analysts: Optional[int] = Field( + default=None, + description="Number of analysts providing estimates for the period.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/forward_pe_estimates.py b/openbb_platform/core/openbb_core/provider/standard_models/forward_pe_estimates.py new file mode 100644 index 0000000000000000000000000000000000000000..c1baf6d67730267552365039ec879b4a036c0cdc --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/forward_pe_estimates.py @@ -0,0 +1,53 @@ +"""Forward PE Estimates Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class ForwardPeEstimatesQueryParams(QueryParams): + """Forward PE Estimates Query Parameters.""" + + symbol: Optional[str] = Field( + default=None, + description=QUERY_DESCRIPTIONS["symbol"], + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v): + """Convert field to uppercase.""" + return v.upper() if v else None + + +class ForwardPeEstimatesData(Data): + """Forward PE Estimates Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + name: Optional[str] = Field(default=None, description="Name of the entity.") + year1: Optional[float] = Field( + default=None, + description="Estimated PE ratio for the next fiscal year.", + ) + year2: Optional[float] = Field( + default=None, + description="Estimated PE ratio two fiscal years from now.", + ) + year3: Optional[float] = Field( + default=None, + description="Estimated PE ratio three fiscal years from now.", + ) + year4: Optional[float] = Field( + default=None, + description="Estimated PE ratio four fiscal years from now.", + ) + year5: Optional[float] = 
Field( + default=None, + description="Estimated PE ratio five fiscal years from now.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/forward_sales_estimates.py b/openbb_platform/core/openbb_core/provider/standard_models/forward_sales_estimates.py new file mode 100644 index 0000000000000000000000000000000000000000..14a01db21aa72dee4d0cf9be00b262fa20d3cd94 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/forward_sales_estimates.py @@ -0,0 +1,67 @@ +"""Forward Sales Estimates Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data, ForceInt +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class ForwardSalesEstimatesQueryParams(QueryParams): + """Forward Sales Estimates Query Parameters.""" + + symbol: Optional[str] = Field( + default=None, + description=QUERY_DESCRIPTIONS["symbol"], + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v): + """Convert field to uppercase.""" + return v.upper() if v else None + + +class ForwardSalesEstimatesData(Data): + """Forward Sales Estimates Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + name: Optional[str] = Field(default=None, description="Name of the entity.") + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + fiscal_year: Optional[int] = Field( + default=None, description="Fiscal year for the estimate." + ) + fiscal_period: Optional[str] = Field( + default=None, description="Fiscal quarter for the estimate." + ) + calendar_year: Optional[int] = Field( + default=None, description="Calendar year for the estimate." 
+ ) + calendar_period: Optional[str] = Field( + default=None, description="Calendar quarter for the estimate." + ) + low_estimate: Optional[ForceInt] = Field( + default=None, description="The sales estimate low for the period." + ) + high_estimate: Optional[ForceInt] = Field( + default=None, description="The sales estimate high for the period." + ) + mean: Optional[ForceInt] = Field( + default=None, description="The sales estimate mean for the period." + ) + median: Optional[ForceInt] = Field( + default=None, description="The sales estimate median for the period." + ) + standard_deviation: Optional[ForceInt] = Field( + default=None, + description="The sales estimate standard deviation for the period.", + ) + number_of_analysts: Optional[int] = Field( + default=None, + description="Number of analysts providing estimates for the period.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/fred_release_table.py b/openbb_platform/core/openbb_core/provider/standard_models/fred_release_table.py new file mode 100644 index 0000000000000000000000000000000000000000..e7c0f2b7203f537e1e3c5bcb6410583d6adc088b --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/fred_release_table.py @@ -0,0 +1,93 @@ +"""FRED Release Table Standard Model.""" + +from datetime import date as dateType +from typing import Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class ReleaseTableQueryParams(QueryParams): + """FRED Release Table Query.""" + + release_id: str = Field( + description="The ID of the release." 
+ " Use `fred_search` to find releases.", + ) + element_id: Optional[str] = Field( + default=None, + description="The element ID of a specific table in the release.", + ) + date: Union[None, dateType, str] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("date", ""), + ) + + @field_validator("date", mode="before", check_fields=False) + @classmethod + def _validate_date(cls, v): + """Validate the date.""" + # pylint: disable=import-outside-toplevel + from pandas import to_datetime + + if v is None: + return None + if isinstance(v, dateType): + return v.strftime("%Y-%m-%d") + new_dates: list = [] + if isinstance(v, str): + dates = v.split(",") + if isinstance(v, list): + dates = v + for date in dates: + new_dates.append(to_datetime(date).date().strftime("%Y-%m-%d")) + + return ",".join(new_dates) if new_dates else None + + +class ReleaseTableData(Data): + """FRED Release Table Data.""" + + date: Optional[dateType] = Field( + default=None, description=DATA_DESCRIPTIONS.get("date", "") + ) + level: Optional[int] = Field( + default=None, + description="The indentation level of the element.", + ) + element_type: Optional[str] = Field( + default=None, + description="The type of the element.", + ) + line: Optional[int] = Field( + default=None, + description="The line number of the element.", + ) + element_id: Optional[str] = Field( + default=None, + description="The element id in the parent/child relationship.", + ) + parent_id: Optional[str] = Field( + default=None, + description="The parent id in the parent/child relationship.", + ) + children: Optional[str] = Field( + default=None, + description="The element_id of each child, as a comma-separated string.", + ) + symbol: Optional[str] = Field( + default=None, + description=DATA_DESCRIPTIONS.get("symbol", ""), + ) + name: Optional[str] = Field( + default=None, + description="The name of the series.", + ) + value: Optional[float] = Field( + default=None, + description="The reported value of the series.", + ) 
diff --git a/openbb_platform/core/openbb_core/provider/standard_models/fred_search.py b/openbb_platform/core/openbb_core/provider/standard_models/fred_search.py new file mode 100644 index 0000000000000000000000000000000000000000..be64922b7c8eb731c7f4119e9ae7317a56d1c219 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/fred_search.py @@ -0,0 +1,104 @@ +"""FRED Search Model.""" + +from datetime import ( + date as dateType, + datetime, +) +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from pydantic import Field + + +class SearchQueryParams(QueryParams): + """FRED Search Query Params.""" + + query: Optional[str] = Field(default=None, description="The search word(s).") + + +class SearchData(Data): + """FRED Search Data.""" + + release_id: Optional[str] = Field( + default=None, + description="The release ID for queries.", + ) + series_id: Optional[str] = Field( + default=None, + description="The series ID for the item in the release.", + ) + series_group: Optional[str] = Field( + default=None, + description="The series group ID of the series. This value is used to query for regional data.", + ) + region_type: Optional[str] = Field( + default=None, + description="The region type of the series.", + ) + name: Optional[str] = Field( + default=None, + description="The name of the release.", + ) + title: Optional[str] = Field( + default=None, + description="The title of the series.", + ) + observation_start: Optional[dateType] = Field( + default=None, description="The date of the first observation in the series." + ) + observation_end: Optional[dateType] = Field( + default=None, description="The date of the last observation in the series." 
+ ) + frequency: Optional[str] = Field( + default=None, + description="The frequency of the data.", + ) + frequency_short: Optional[str] = Field( + default=None, + description="Short form of the data frequency.", + ) + units: Optional[str] = Field( + default=None, + description="The units of the data.", + ) + units_short: Optional[str] = Field( + default=None, + description="Short form of the data units.", + ) + seasonal_adjustment: Optional[str] = Field( + default=None, + description="The seasonal adjustment of the data.", + ) + seasonal_adjustment_short: Optional[str] = Field( + default=None, + description="Short form of the data seasonal adjustment.", + ) + last_updated: Optional[datetime] = Field( + default=None, + description="The datetime of the last update to the data.", + ) + popularity: Optional[int] = Field( + default=None, + description="Popularity of the series", + ) + group_popularity: Optional[int] = Field( + default=None, + description="Group popularity of the release", + ) + realtime_start: Optional[dateType] = Field( + default=None, + description="The realtime start date of the series.", + ) + realtime_end: Optional[dateType] = Field( + default=None, + description="The realtime end date of the series.", + ) + notes: Optional[str] = Field( + default=None, description="Description of the release." 
+ ) + press_release: Optional[bool] = Field( + description="If the release is a press release.", + default=None, + ) + url: Optional[str] = Field(default=None, description="URL to the release.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/fred_series.py b/openbb_platform/core/openbb_core/provider/standard_models/fred_series.py new file mode 100644 index 0000000000000000000000000000000000000000..a0c11843b54c6c6bbaef6555e3fe44c1509db76c --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/fred_series.py @@ -0,0 +1,41 @@ +"""FRED Series Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class SeriesQueryParams(QueryParams): + """FRED Series Query.""" + + symbol: str = Field( + description=QUERY_DESCRIPTIONS.get("symbol", ""), + ) + start_date: Optional[dateType] = Field( + description=QUERY_DESCRIPTIONS.get("start_date", ""), default=None + ) + end_date: Optional[dateType] = Field( + description=QUERY_DESCRIPTIONS.get("end_date", ""), default=None + ) + limit: Optional[int] = Field( + description=QUERY_DESCRIPTIONS.get("limit", ""), default=100000 + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class SeriesData(Data): + """FRED Series Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/futures_curve.py b/openbb_platform/core/openbb_core/provider/standard_models/futures_curve.py new file mode 100644 index 
0000000000000000000000000000000000000000..ece5d6d80f08fe3fe0b726d05997223985fdfacc --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/futures_curve.py @@ -0,0 +1,63 @@ +"""Futures Curve Standard Model.""" + +from datetime import date as dateType +from typing import Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class FuturesCurveQueryParams(QueryParams): + """Futures Curve Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + date: Optional[Union[dateType, str]] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("date", ""), + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v): + """Convert field to uppercase.""" + return v.upper() + + @field_validator("date", mode="before", check_fields=False) + @classmethod + def _validate_date(cls, v): + """Validate the date.""" + # pylint: disable=import-outside-toplevel + from pandas import to_datetime + + if v is None: + return None + if isinstance(v, dateType): + return v.strftime("%Y-%m-%d") + new_dates: list = [] + if isinstance(v, str): + dates = v.split(",") + if isinstance(v, list): + dates = v + for date in dates: + new_dates.append(to_datetime(date).date().strftime("%Y-%m-%d")) + + return ",".join(new_dates) if new_dates else None + + +class FuturesCurveData(Data): + """Futures Curve Data.""" + + date: Optional[dateType] = Field( + default=None, description=DATA_DESCRIPTIONS.get("date", "") + ) + expiration: str = Field(description="Futures expiration month.") + price: Optional[float] = Field( + default=None, + description="The price of the futures contract.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) diff --git 
a/openbb_platform/core/openbb_core/provider/standard_models/futures_historical.py b/openbb_platform/core/openbb_core/provider/standard_models/futures_historical.py new file mode 100644 index 0000000000000000000000000000000000000000..95277188011fb43f5fd10a687bffd3949200efa3 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/futures_historical.py @@ -0,0 +1,54 @@ +"""Futures Historical Price Standard Model.""" + +from datetime import date, datetime +from typing import Optional + +from dateutil import parser +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class FuturesHistoricalQueryParams(QueryParams): + """Futures Historical Price Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + start_date: Optional[date] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[date] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + expiration: Optional[str] = Field( + default=None, + description="Future expiry date with format YYYY-MM", + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class FuturesHistoricalData(Data): + """Futures Historical Price Data.""" + + date: datetime = Field(description=DATA_DESCRIPTIONS.get("date", "")) + open: float = Field(description=DATA_DESCRIPTIONS.get("open", "")) + high: float = Field(description=DATA_DESCRIPTIONS.get("high", "")) + low: float = Field(description=DATA_DESCRIPTIONS.get("low", "")) + close: float = Field(description=DATA_DESCRIPTIONS.get("close", "")) + volume: float = Field(description=DATA_DESCRIPTIONS.get("volume", "")) + + 
@field_validator("date", mode="before", check_fields=False) + @classmethod + def date_validate(cls, v): + """Return formatted datetime.""" + return parser.isoparse(str(v)) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/futures_info.py b/openbb_platform/core/openbb_core/provider/standard_models/futures_info.py new file mode 100644 index 0000000000000000000000000000000000000000..3a10f285628140f9807b9edaf9cc94d6ab3b59ee --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/futures_info.py @@ -0,0 +1,18 @@ +"""Futures Info Standard Model.""" + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS +from pydantic import Field + + +class FuturesInfoQueryParams(QueryParams): + """Futures Info Query.""" + + # leaving this empty to let the provider create custom symbol docstrings. + + +class FuturesInfoData(Data): + """Futures Info Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/futures_instruments.py b/openbb_platform/core/openbb_core/provider/standard_models/futures_instruments.py new file mode 100644 index 0000000000000000000000000000000000000000..adb7141001ec93ccec84df3ce183f9885320a8ac --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/futures_instruments.py @@ -0,0 +1,12 @@ +"""Futures Instruments Standard Model.""" + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams + + +class FuturesInstrumentsQueryParams(QueryParams): + """Futures Instruments Query.""" + + +class FuturesInstrumentsData(Data): + """Futures Instruments Data.""" diff --git a/openbb_platform/core/openbb_core/provider/standard_models/gdp_forecast.py 
b/openbb_platform/core/openbb_core/provider/standard_models/gdp_forecast.py new file mode 100644 index 0000000000000000000000000000000000000000..6f3adc58fd0b3d444597563286c66f18bf27f1a8 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/gdp_forecast.py @@ -0,0 +1,33 @@ +"""Forecast GDP Standard Model.""" + +from datetime import date as dateType +from typing import Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class GdpForecastQueryParams(QueryParams): + """Forecast GDP Query.""" + + start_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("start_date") + ) + end_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date") + ) + + +class GdpForecastData(Data): + """Forecast GDP Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date")) + country: str = Field(description=DATA_DESCRIPTIONS.get("country")) + value: Union[int, float] = Field( + description="Forecasted GDP value for the country and date." 
+ ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/gdp_nominal.py b/openbb_platform/core/openbb_core/provider/standard_models/gdp_nominal.py new file mode 100644 index 0000000000000000000000000000000000000000..e416767f80523d764717386ba3ef62b54c929705 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/gdp_nominal.py @@ -0,0 +1,35 @@ +"""Nominal GDP Standard Model.""" + +from datetime import date as dateType +from typing import Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class GdpNominalQueryParams(QueryParams): + """Nominal GDP Query.""" + + start_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("start_date") + ) + end_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date") + ) + + +class GdpNominalData(Data): + """Nominal GDP Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date")) + country: Optional[str] = Field( + default=None, description="The country represented by the GDP value." 
+ ) + value: Union[int, float] = Field( + description="GDP value for the country and date.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/gdp_real.py b/openbb_platform/core/openbb_core/provider/standard_models/gdp_real.py new file mode 100644 index 0000000000000000000000000000000000000000..642d5cf39a263bd862569e028114c1138a36312d --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/gdp_real.py @@ -0,0 +1,35 @@ +"""Real GDP Standard Model.""" + +from datetime import date as dateType +from typing import Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class GdpRealQueryParams(QueryParams): + """Real GDP Query.""" + + start_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("start_date") + ) + end_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date") + ) + + +class GdpRealData(Data): + """Real GDP Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date")) + country: Optional[str] = Field( + default=None, description="The country represented by the Real GDP value." 
+ ) + value: Union[int, float] = Field( + description="Real GDP value for the country and date.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/government_trades.py b/openbb_platform/core/openbb_core/provider/standard_models/government_trades.py new file mode 100644 index 0000000000000000000000000000000000000000..e0aa2daffa72fc5bef38a64e576adc3aa65b2e39 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/government_trades.py @@ -0,0 +1,47 @@ +"""Government Trades Standard Model.""" + +from datetime import date as dateType +from typing import Literal, Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, NonNegativeInt, field_validator + + +class GovernmentTradesQueryParams(QueryParams): + """Government Trades Query.""" + + symbol: Optional[str] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("symbol", "") + ) + chamber: Literal["house", "senate", "all"] = Field( + default="all", description="Government Chamber." + ) + limit: Optional[NonNegativeInt] = Field( + default=100, description=QUERY_DESCRIPTIONS.get("limit", "") + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str): + """Convert field to uppercase.""" + return v.upper() if v else None + + +class GovernmentTradesData(Data): + """Government Trades data.""" + + symbol: Optional[str] = Field( + default=None, description=DATA_DESCRIPTIONS.get("symbol", "") + ) + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + transaction_date: Optional[dateType] = Field( + default=None, description="Date of Transaction." + ) + representative: Optional[str] = Field( + default=None, description="Name of Representative." 
+ ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/high_quality_market.py b/openbb_platform/core/openbb_core/provider/standard_models/high_quality_market.py new file mode 100644 index 0000000000000000000000000000000000000000..5c63f7efcde5aa6b8c4b0f6cc07a36ee0186fd5b --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/high_quality_market.py @@ -0,0 +1,34 @@ +"""High Quality Market Corporate Bond Standard Model.""" + +from datetime import ( + date as dateType, +) +from typing import Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class HighQualityMarketCorporateBondQueryParams(QueryParams): + """High Quality Market Corporate Bond Query.""" + + date: Optional[Union[dateType, str]] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("date", ""), + ) + + +class HighQualityMarketCorporateBondData(Data): + """High Quality Market Corporate Bond Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + rate: float = Field( + description="Interest rate.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + maturity: str = Field(description="Maturity.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/historical_attributes.py b/openbb_platform/core/openbb_core/provider/standard_models/historical_attributes.py new file mode 100644 index 0000000000000000000000000000000000000000..51235dde37715feb4ca1969bfc756c88d2a0e27e --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/historical_attributes.py @@ -0,0 +1,68 @@ +"""Historical Attributes Standard Model.""" + +from datetime import date as dateType +from typing import List, Literal, Optional, Set, Union + +from 
openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class HistoricalAttributesQueryParams(QueryParams): + """Historical Attributes Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol")) + tag: str = Field(description="Intrinio data tag ID or code.") + start_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("start_date") + ) + end_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date") + ) + frequency: Optional[ + Literal["daily", "weekly", "monthly", "quarterly", "yearly"] + ] = Field(default="yearly", description=QUERY_DESCRIPTIONS.get("frequency")) + limit: Optional[int] = Field( + default=1000, description=QUERY_DESCRIPTIONS.get("limit") + ) + tag_type: Optional[str] = Field( + default=None, description="Filter by type, when applicable." + ) + sort: Optional[Literal["asc", "desc"]] = Field( + default="desc", description="Sort order." 
+ ) + + @field_validator("tag", mode="before", check_fields=False) + @classmethod + def multiple_tags(cls, v: Union[str, List[str], Set[str]]): + """Accept a comma-separated string or list of tags.""" + if isinstance(v, str): + return v.lower() + return ",".join([tag.lower() for tag in list(v)]) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + @field_validator("frequency", "sort", mode="before", check_fields=False) + @classmethod + def to_lower(cls, v: Optional[str]) -> Optional[str]: + """Convert field to lowercase.""" + return v.lower() if v else v + + +class HistoricalAttributesData(Data): + """Historical Attributes Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date")) + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol")) + tag: Optional[str] = Field( + default=None, description="Tag name for the fetched data." + ) + value: Optional[float] = Field(default=None, description="The value of the data.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/historical_dividends.py b/openbb_platform/core/openbb_core/provider/standard_models/historical_dividends.py new file mode 100644 index 0000000000000000000000000000000000000000..78068dd3ed82590348079b5cbbf99491e80bba6a --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/historical_dividends.py @@ -0,0 +1,38 @@ +"""Historical Dividends Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class HistoricalDividendsQueryParams(QueryParams): + """Historical Dividends Query.""" + + symbol: str = 
Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + start_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("start_date", "") + ) + end_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date", "") + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class HistoricalDividendsData(Data): + """Historical Dividends Data.""" + + ex_dividend_date: dateType = Field( + description="The ex-dividend date - the date on which the stock begins trading without rights to the dividend." + ) + amount: float = Field(description="The dividend amount per share.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/historical_employees.py b/openbb_platform/core/openbb_core/provider/standard_models/historical_employees.py new file mode 100644 index 0000000000000000000000000000000000000000..19011fddfb4a7e7fd117e2da5d4d6e8365fd93ce --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/historical_employees.py @@ -0,0 +1,72 @@ +"""Historical Employees Standard Model.""" + +from datetime import date, datetime +from typing import List, Set, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class HistoricalEmployeesQueryParams(QueryParams): + """Historical Employees Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class HistoricalEmployeesData(Data): + """Historical Employees Data.""" + + symbol: str = 
Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + cik: int = Field(description=DATA_DESCRIPTIONS.get("cik", "")) + acceptance_time: datetime = Field( + description="Time of acceptance of the company employee." + ) + period_of_report: date = Field( + description="Date of reporting of the company employee." + ) + company_name: str = Field( + description="Registered name of the company to retrieve the historical employees of." + ) + form_type: str = Field(description="Form type of the company employee.") + filing_date: date = Field(description="Filing date of the company employee") + employee_count: int = Field(description="Count of employees of the company.") + source: str = Field( + description="Source URL which retrieves this data for the company." + ) + + @field_validator("acceptance_time", mode="before", check_fields=False) + @classmethod + def acceptance_time_validate(cls, v): # pylint: disable=E0213 + """Validate acceptance time.""" + return datetime.strptime(v, "%Y-%m-%d %H:%M:%S") + + @field_validator("period_of_report", mode="before", check_fields=False) + @classmethod + def period_of_report_validate(cls, v): # pylint: disable=E0213 + """Validate period of report.""" + return datetime.strptime(v, "%Y-%m-%d") + + @field_validator("filing_date", mode="before", check_fields=False) + @classmethod + def filing_date_validate(cls, v): # pylint: disable=E0213 + """Validate filing date.""" + return datetime.strptime(v, "%Y-%m-%d") + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: Union[str, List[str], Set[str]]): + """Convert field to uppercase.""" + if isinstance(v, str): + return v.upper() + return ",".join([symbol.upper() for symbol in list(v)]) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/historical_eps.py b/openbb_platform/core/openbb_core/provider/standard_models/historical_eps.py new file mode 100644 index 
0000000000000000000000000000000000000000..141de6edc2694122b8aa6780742ecd57585abde1 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/historical_eps.py @@ -0,0 +1,46 @@ +"""Historical EPS Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from dateutil import parser +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class HistoricalEpsQueryParams(QueryParams): + """Historical EPS Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class HistoricalEpsData(Data): + """Historical EPS Data.""" + + date: dateType = Field(default=None, description=DATA_DESCRIPTIONS.get("date", "")) + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + announce_time: Optional[str] = Field( + default=None, description="Timing of the earnings announcement." + ) + eps_actual: Optional[float] = Field( + default=None, description="Actual EPS from the earnings date." + ) + eps_estimated: Optional[float] = Field( + default=None, description="Estimated EPS for the earnings date." 
+ ) + + @field_validator("date", mode="before", check_fields=False) + def date_validate(cls, v): # pylint: disable=E0213 + """Return formatted datetime.""" + return parser.isoparse(str(v)) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/historical_market_cap.py b/openbb_platform/core/openbb_core/provider/standard_models/historical_market_cap.py new file mode 100644 index 0000000000000000000000000000000000000000..149efd287ec6645faa59508f032fb4a508fd721f --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/historical_market_cap.py @@ -0,0 +1,41 @@ +"""Historical Market Cap Model.""" + +from datetime import date as dateType +from typing import Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class HistoricalMarketCapQueryParams(QueryParams): + """Historical Market Cap Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + start_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("start_date", "") + ) + end_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date", "") + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class HistoricalMarketCapData(Data): + """Historical Market Cap Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + market_cap: Union[int, float] = Field( + description="Market capitalization of the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) diff --git 
a/openbb_platform/core/openbb_core/provider/standard_models/historical_splits.py b/openbb_platform/core/openbb_core/provider/standard_models/historical_splits.py new file mode 100644 index 0000000000000000000000000000000000000000..0342f485ab381846d53dafcc1fd5711866ced5de --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/historical_splits.py @@ -0,0 +1,42 @@ +"""Historical Splits Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class HistoricalSplitsQueryParams(QueryParams): + """Historical Splits Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class HistoricalSplitsData(Data): + """Historical Splits Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + numerator: Optional[float] = Field( + default=None, + description="Numerator of the split.", + ) + denominator: Optional[float] = Field( + default=None, + description="Denominator of the split.", + ) + split_ratio: Optional[str] = Field( + default=None, + description="Split ratio.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/house_price_index.py b/openbb_platform/core/openbb_core/provider/standard_models/house_price_index.py new file mode 100644 index 0000000000000000000000000000000000000000..aa85600d6c433572cf4fbf25b56c2b5b0049bcdb --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/house_price_index.py @@ -0,0 +1,54 @@ +"""House Price Index Standard Model.""" + +from datetime import 
date as dateType +from typing import Literal, Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class HousePriceIndexQueryParams(QueryParams): + """House Price Index Query.""" + + country: str = Field( + description=QUERY_DESCRIPTIONS.get("country", ""), + default="united_states", + ) + frequency: Literal["monthly", "quarter", "annual"] = Field( + description=QUERY_DESCRIPTIONS.get("frequency", ""), + default="quarter", + json_schema_extra={"choices": ["monthly", "quarter", "annual"]}, + ) + transform: Literal["index", "yoy", "period"] = Field( + description="Transformation of the CPI data. Period represents the change since previous." + + " Defaults to change from one year ago (yoy).", + default="index", + json_schema_extra={"choices": ["index", "yoy", "period"]}, + ) + start_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("start_date") + ) + end_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date") + ) + + +class HousePriceIndexData(Data): + """House Price Index Data.""" + + date: Optional[dateType] = Field( + default=None, description=DATA_DESCRIPTIONS.get("date") + ) + country: Optional[str] = Field( + default=None, + description=DATA_DESCRIPTIONS.get("country", ""), + ) + value: Optional[float] = Field( + default=None, + description="Share price index value.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/ice_bofa.py b/openbb_platform/core/openbb_core/provider/standard_models/ice_bofa.py new file mode 100644 index 0000000000000000000000000000000000000000..725e6dff84ffa52fff3628704ae3a068f656ceb3 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/ice_bofa.py @@ -0,0 +1,46 @@ +"""ICE BofA US Corporate Bond Indices 
Standard Model.""" + +from datetime import ( + date as dateType, +) +from typing import Literal, Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class ICEBofAQueryParams(QueryParams): + """ICE BofA US Corporate Bond Indices Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + index_type: Literal["yield", "yield_to_worst", "total_return", "spread"] = Field( + default="yield", + description="The type of series.", + ) + + @field_validator("index_type", mode="before", check_fields=False) + @classmethod + def to_lower(cls, v: Optional[str]) -> Optional[str]: + """Convert field to lowercase.""" + return v.lower() if v else v + + +class ICEBofAData(Data): + """ICE BofA US Corporate Bond Indices Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + rate: Optional[float] = Field( + description="ICE BofA US Corporate Bond Indices Rate." 
+ ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/immediate_interest_rate.py b/openbb_platform/core/openbb_core/provider/standard_models/immediate_interest_rate.py new file mode 100644 index 0000000000000000000000000000000000000000..90678728d4105a12edb7900fb06e85f87204bee7 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/immediate_interest_rate.py @@ -0,0 +1,44 @@ +"""Immediate Interest Rates Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class ImmediateInterestRateQueryParams(QueryParams): + """Immediate (Call money, interbank rate) Rate Query.""" + + country: str = Field( + description=QUERY_DESCRIPTIONS.get("country", ""), + default="united_states", + ) + start_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("start_date") + ) + end_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date") + ) + + +class ImmediateInterestRateData(Data): + """Immediate Interest Rates Data.""" + + date: Optional[dateType] = Field( + default=None, description=DATA_DESCRIPTIONS.get("date") + ) + country: Optional[str] = Field( + default=None, + description="Country for which interest rate is given", + ) + value: Optional[float] = Field( + default=None, + description="Immediate interest rates, call money, interbank rate.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/income_statement.py b/openbb_platform/core/openbb_core/provider/standard_models/income_statement.py new file mode 100644 index 
0000000000000000000000000000000000000000..de752603942b816a67d84f1dee556c9bda94bde7 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/income_statement.py @@ -0,0 +1,38 @@ +"""Income Statement Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + QUERY_DESCRIPTIONS, +) +from pydantic import Field, NonNegativeInt, field_validator + + +class IncomeStatementQueryParams(QueryParams): + """Income Statement Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + limit: Optional[NonNegativeInt] = Field( + default=5, description=QUERY_DESCRIPTIONS.get("limit", "") + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str): + """Convert field to uppercase.""" + return v.upper() + + +class IncomeStatementData(Data): + """Income Statement Data.""" + + period_ending: dateType = Field(description="The end date of the reporting period.") + fiscal_period: Optional[str] = Field( + description="The fiscal period of the report.", default=None + ) + fiscal_year: Optional[int] = Field( + description="The fiscal year of the fiscal period.", default=None + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/income_statement_growth.py b/openbb_platform/core/openbb_core/provider/standard_models/income_statement_growth.py new file mode 100644 index 0000000000000000000000000000000000000000..5c9b1590054094c6e1142127d2d613717f9769ea --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/income_statement_growth.py @@ -0,0 +1,36 @@ +"""Income Statement Growth Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from 
openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import QUERY_DESCRIPTIONS +from pydantic import Field, field_validator + + +class IncomeStatementGrowthQueryParams(QueryParams): + """Income Statement Growth Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + limit: Optional[int] = Field( + default=10, description=QUERY_DESCRIPTIONS.get("limit", "") + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class IncomeStatementGrowthData(Data): + """Income Statement Growth Data.""" + + period_ending: dateType = Field(description="The end date of the reporting period.") + fiscal_period: Optional[str] = Field( + description="The fiscal period of the report.", default=None + ) + fiscal_year: Optional[int] = Field( + description="The fiscal year of the fiscal period.", default=None + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/index_constituents.py b/openbb_platform/core/openbb_core/provider/standard_models/index_constituents.py new file mode 100644 index 0000000000000000000000000000000000000000..21c60c199128e6aa53aafa68bf5fc4138c5bb8b5 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/index_constituents.py @@ -0,0 +1,32 @@ +"""Index Constituents Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class IndexConstituentsQueryParams(QueryParams): + """Index Constituents Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol") + @classmethod + def _to_upper(cls, v): + """Convert 
the symbol to uppercase.""" + return v.upper() + + +class IndexConstituentsData(Data): + """Index Constituents Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + name: Optional[str] = Field( + default=None, description="Name of the constituent company in the index." + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/index_historical.py b/openbb_platform/core/openbb_core/provider/standard_models/index_historical.py new file mode 100644 index 0000000000000000000000000000000000000000..cacb738fd647d80dbe29eea1e4f420e6612f6204 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/index_historical.py @@ -0,0 +1,65 @@ +"""Index Historical Standard Model.""" + +from datetime import ( + date as dateType, + datetime, +) +from typing import Optional, Union + +from dateutil import parser +from openbb_core.provider.abstract.data import Data, ForceInt +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, StrictFloat, field_validator + + +class IndexHistoricalQueryParams(QueryParams): + """Index Historical Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + start_date: Optional[dateType] = Field( + description=QUERY_DESCRIPTIONS.get("start_date", ""), default=None + ) + end_date: Optional[dateType] = Field( + description=QUERY_DESCRIPTIONS.get("end_date", ""), default=None + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class IndexHistoricalData(Data): + """Index Historical Data.""" + + date: Union[dateType, datetime] = Field( + description=DATA_DESCRIPTIONS.get("date", "") + ) + open: Optional[StrictFloat] = Field( + default=None, description=DATA_DESCRIPTIONS.get("open", "") + ) + high: 
Optional[StrictFloat] = Field( + default=None, description=DATA_DESCRIPTIONS.get("high", "") + ) + low: Optional[StrictFloat] = Field( + default=None, description=DATA_DESCRIPTIONS.get("low", "") + ) + close: Optional[StrictFloat] = Field( + default=None, description=DATA_DESCRIPTIONS.get("close", "") + ) + volume: Optional[ForceInt] = Field( + default=None, description=DATA_DESCRIPTIONS.get("volume", "") + ) + + @field_validator("date", mode="before", check_fields=False) + @classmethod + def date_validate(cls, v): + """Return formatted datetime.""" + if ":" in str(v): + return parser.isoparse(str(v)) + return parser.parse(str(v)).date() diff --git a/openbb_platform/core/openbb_core/provider/standard_models/index_info.py b/openbb_platform/core/openbb_core/provider/standard_models/index_info.py new file mode 100644 index 0000000000000000000000000000000000000000..07c734668c5c786f1218bf0d2ae9626ab0c6de06 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/index_info.py @@ -0,0 +1,42 @@ +"""Index Info Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class IndexInfoQueryParams(QueryParams): + """Index Info Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol") + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class IndexInfoData(Data): + """Index Info Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + name: str = Field(description="The name of the index.") + description: Optional[str] = Field( + description="The short description of the index.", default=None + ) + methodology: Optional[str] = Field( + description="URL to 
the methodology document.", default=None + ) + factsheet: Optional[str] = Field( + description="URL to the factsheet document.", default=None + ) + num_constituents: Optional[int] = Field( + description="The number of constituents in the index.", default=None + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/index_search.py b/openbb_platform/core/openbb_core/provider/standard_models/index_search.py new file mode 100644 index 0000000000000000000000000000000000000000..c60b98fcf41bc908e4e1dabe484bad75f96e47bb --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/index_search.py @@ -0,0 +1,22 @@ +"""Index Search Standard Model.""" + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS +from pydantic import Field + + +class IndexSearchQueryParams(QueryParams): + """Index Search Query.""" + + query: str = Field(description="Search query.", default="") + is_symbol: bool = Field( + description="Whether to search by ticker symbol.", default=False + ) + + +class IndexSearchData(Data): + """Index Search Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + name: str = Field(description="Name of the index.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/index_sectors.py b/openbb_platform/core/openbb_core/provider/standard_models/index_sectors.py new file mode 100644 index 0000000000000000000000000000000000000000..43c2578d042132c53602644d1aef4c1da79338ae --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/index_sectors.py @@ -0,0 +1,25 @@ +"""Index Sectors Standard Model.""" + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import QUERY_DESCRIPTIONS +from pydantic import Field, field_validator + + 
+class IndexSectorsQueryParams(QueryParams): + """Index Sectors Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol") + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class IndexSectorsData(Data): + """Index Sectors Data.""" + + sector: str = Field(description="The sector name.") + weight: float = Field(description="The weight of the sector in the index.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/index_snapshots.py b/openbb_platform/core/openbb_core/provider/standard_models/index_snapshots.py new file mode 100644 index 0000000000000000000000000000000000000000..9844ad7f1e1b80d863da356959cc5c9b63d079a0 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/index_snapshots.py @@ -0,0 +1,52 @@ +"""Index Snapshots Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS +from pydantic import Field + + +class IndexSnapshotsQueryParams(QueryParams): + """Index Snapshots Query.""" + + region: str = Field( + default="us", description="The region of focus for the data - i.e., us, eu." + ) + + +class IndexSnapshotsData(Data): + """Index Snapshots Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + name: Optional[str] = Field(default=None, description="Name of the index.") + currency: Optional[str] = Field(default=None, description="Currency of the index.") + price: Optional[float] = Field( + default=None, description="Current price of the index." 
+ ) + open: Optional[float] = Field( + default=None, description=DATA_DESCRIPTIONS.get("open", "") + ) + high: Optional[float] = Field( + default=None, description=DATA_DESCRIPTIONS.get("high", "") + ) + low: Optional[float] = Field( + default=None, description=DATA_DESCRIPTIONS.get("low", "") + ) + close: Optional[float] = Field( + default=None, description=DATA_DESCRIPTIONS.get("close", "") + ) + volume: Optional[int] = Field( + default=None, description=DATA_DESCRIPTIONS.get("volume", "") + ) + prev_close: Optional[float] = Field( + default=None, description=DATA_DESCRIPTIONS.get("prev_close", "") + ) + change: Optional[float] = Field( + default=None, description="Change in value of the index." + ) + change_percent: Optional[float] = Field( + default=None, + description="Change, in normalized percentage points, of the index.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/industry_pe.py b/openbb_platform/core/openbb_core/provider/standard_models/industry_pe.py new file mode 100644 index 0000000000000000000000000000000000000000..4a99574650950711b41e58aad48b12650d930ed1 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/industry_pe.py @@ -0,0 +1,26 @@ +"""Industry P/E Ratio Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS +from pydantic import Field + + +class IndustryPEQueryParams(QueryParams): + """Industry P/E Ratio Query.""" + + +class IndustryPEData(Data): + """Industry P/E Ratio Data.""" + + date: Optional[dateType] = Field( + description=DATA_DESCRIPTIONS.get("date", ""), default=None + ) + exchange: Optional[str] = Field( + default=None, description="The exchange where the data is from." 
+ ) + industry: str = Field(description="The name of the industry.") + pe: float = Field(description="The P/E ratio of the industry.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/insider_trading.py b/openbb_platform/core/openbb_core/provider/standard_models/insider_trading.py new file mode 100644 index 0000000000000000000000000000000000000000..b391a51bda85e5b71aedd08443801feb15d53792 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/insider_trading.py @@ -0,0 +1,93 @@ +"""Insider Trading Standard Model.""" + +from datetime import ( + date as dateType, + datetime, + time, +) +from typing import Optional, Union + +from dateutil import parser +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, StrictInt, field_validator + + +class InsiderTradingQueryParams(QueryParams): + """Insider Trading Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + limit: StrictInt = Field( + default=500, + description=QUERY_DESCRIPTIONS.get("limit", ""), + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class InsiderTradingData(Data): + """Insider Trading Data.""" + + symbol: Optional[str] = Field( + default=None, description=DATA_DESCRIPTIONS.get("symbol", "") + ) + company_cik: Optional[Union[int, str]] = Field( + default=None, description="CIK number of the company." + ) + filing_date: Optional[Union[dateType, datetime]] = Field( + default=None, description="Filing date of the trade." + ) + transaction_date: Optional[dateType] = Field( + default=None, description="Date of the transaction." 
+ ) + owner_cik: Optional[Union[int, str]] = Field( + default=None, description="Reporting individual's CIK." + ) + owner_name: Optional[str] = Field( + default=None, description="Name of the reporting individual." + ) + owner_title: Optional[str] = Field( + default=None, description="The title held by the reporting individual." + ) + transaction_type: Optional[str] = Field( + default=None, description="Type of transaction being reported." + ) + acquisition_or_disposition: Optional[str] = Field( + default=None, description="Acquisition or disposition of the shares." + ) + security_type: Optional[str] = Field( + default=None, description="The type of security transacted." + ) + securities_owned: Optional[float] = Field( + default=None, + description="Number of securities owned by the reporting individual.", + ) + securities_transacted: Optional[float] = Field( + default=None, + description="Number of securities transacted by the reporting individual.", + ) + transaction_price: Optional[float] = Field( + default=None, description="The price of the transaction." 
+ ) + filing_url: Optional[str] = Field(default=None, description="Link to the filing.") + + @field_validator( + "filing_date", "transaction_date", mode="before", check_fields=False + ) + @classmethod + def date_validate(cls, v): # pylint: disable=E0213 + """Return formatted datetime.""" + if v: + filing_date = parser.isoparse(str(v)) + if filing_date.time() == time(0, 0): + return filing_date.date() + return filing_date + return None diff --git a/openbb_platform/core/openbb_core/provider/standard_models/institutional_ownership.py b/openbb_platform/core/openbb_core/provider/standard_models/institutional_ownership.py new file mode 100644 index 0000000000000000000000000000000000000000..66133c189395633b681e5c2dff77dfbbf8fe47d0 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/institutional_ownership.py @@ -0,0 +1,43 @@ +"""Institutional Ownership Standard Model.""" + +from datetime import date as dateType +from typing import List, Optional, Set, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class InstitutionalOwnershipQueryParams(QueryParams): + """Institutional Ownership Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class InstitutionalOwnershipData(Data): + """Institutional Ownership Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + cik: Optional[str] = Field( + default=None, + description=DATA_DESCRIPTIONS.get("cik", ""), + ) + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + + @field_validator("symbol", mode="before", check_fields=False) + 
@classmethod + def to_upper(cls, v: Union[str, List[str], Set[str]]): + """Convert field to uppercase.""" + if isinstance(v, str): + return v.upper() + return ",".join([symbol.upper() for symbol in list(v)]) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/iorb_rates.py b/openbb_platform/core/openbb_core/provider/standard_models/iorb_rates.py new file mode 100644 index 0000000000000000000000000000000000000000..8815fb0b6321d6bc4dbd10fd289d81dc894b2410 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/iorb_rates.py @@ -0,0 +1,32 @@ +"""IORB Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class IORBQueryParams(QueryParams): + """IORB Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + + +class IORBData(Data): + """IORB Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + rate: Optional[float] = Field(description="IORB rate.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/key_executives.py b/openbb_platform/core/openbb_core/provider/standard_models/key_executives.py new file mode 100644 index 0000000000000000000000000000000000000000..9d875b4082e469c3cbd1e838bd6aebda0de19cdd --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/key_executives.py @@ -0,0 +1,42 @@ +"""Key Executives Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data, ForceInt +from openbb_core.provider.abstract.query_params import 
QueryParams +from openbb_core.provider.utils.descriptions import QUERY_DESCRIPTIONS +from pydantic import Field, field_validator + + +class KeyExecutivesQueryParams(QueryParams): + """Key Executives Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class KeyExecutivesData(Data): + """Key Executives Data.""" + + title: str = Field(description="Designation of the key executive.") + name: str = Field(description="Name of the key executive.") + pay: Optional[ForceInt] = Field( + default=None, description="Pay of the key executive." + ) + currency_pay: Optional[str] = Field( + default=None, description="Currency of the pay." + ) + gender: Optional[str] = Field( + default=None, description="Gender of the key executive." + ) + year_born: Optional[ForceInt] = Field( + default=None, description="Birth year of the key executive." + ) + title_since: Optional[ForceInt] = Field( + default=None, description="Date the title was held since." 
+ ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/key_metrics.py b/openbb_platform/core/openbb_core/provider/standard_models/key_metrics.py new file mode 100644 index 0000000000000000000000000000000000000000..475bc3d04685453489500fb2688197287eef8dba --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/key_metrics.py @@ -0,0 +1,40 @@ +"""Key Metrics Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class KeyMetricsQueryParams(QueryParams): + """Key Metrics Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + limit: Optional[int] = Field( + default=100, description=QUERY_DESCRIPTIONS.get("limit", "") + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class KeyMetricsData(Data): + """Key Metrics Data.""" + + symbol: Optional[str] = Field( + default=None, description=DATA_DESCRIPTIONS.get("symbol", "") + ) + market_cap: Optional[float] = Field( + default=None, description="Market capitalization" + ) + pe_ratio: Optional[float] = Field( + default=None, description="Price-to-earnings ratio (P/E ratio)" + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/latest_attributes.py b/openbb_platform/core/openbb_core/provider/standard_models/latest_attributes.py new file mode 100644 index 0000000000000000000000000000000000000000..562d02e908c959e3e263ab416e75219b8425c6c2 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/latest_attributes.py @@ -0,0 +1,44 @@ +"""Latest Attributes Standard Model.""" + +from typing import List, Optional, Set, Union + +from 
openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class LatestAttributesQueryParams(QueryParams): + """Latest Attributes Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol")) + tag: str = Field(description="Intrinio data tag ID or code.") + + @field_validator("tag", mode="before", check_fields=False) + @classmethod + def multiple_tags(cls, v: Union[str, List[str], Set[str]]): + """Accept a comma-separated string or list of tags.""" + if isinstance(v, str): + return v.lower() + return ",".join([tag.lower() for tag in list(v)]) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class LatestAttributesData(Data): + """Latest Attributes Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol")) + tag: Optional[str] = Field( + default=None, description="Tag name for the fetched data." + ) + value: Optional[Union[str, float]] = Field( + default=None, description="The value of the data." 
+ ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/latest_financial_reports.py b/openbb_platform/core/openbb_core/provider/standard_models/latest_financial_reports.py new file mode 100644 index 0000000000000000000000000000000000000000..62b7d8f2ed83c19e8ad5ea5b79ea8525f6382d15 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/latest_financial_reports.py @@ -0,0 +1,35 @@ +"""Latest Financial Reports Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS +from pydantic import Field + + +class LatestFinancialReportsQueryParams(QueryParams): + """Latest Financial Reports Query.""" + + +class LatestFinancialReportsData(Data): + """Latest Financial Reports Data.""" + + filing_date: dateType = Field(description="The date of the filing.") + period_ending: Optional[dateType] = Field( + default=None, description="Report for the period ending." + ) + symbol: Optional[str] = Field( + default=None, description=DATA_DESCRIPTIONS.get("symbol") + ) + name: Optional[str] = Field(default=None, description="Name of the company.") + cik: Optional[str] = Field(default=None, description=DATA_DESCRIPTIONS.get("cik")) + sic: Optional[str] = Field( + default=None, description="Standard Industrial Classification code." + ) + report_type: Optional[str] = Field(default=None, description="Type of filing.") + description: Optional[str] = Field( + default=None, description="Description of the report." 
+ ) + url: str = Field(description="URL to the filing page.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/lbma_fixing.py b/openbb_platform/core/openbb_core/provider/standard_models/lbma_fixing.py new file mode 100644 index 0000000000000000000000000000000000000000..83139212ec9b8730aab1288c7f4a31f27678274f --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/lbma_fixing.py @@ -0,0 +1,75 @@ +"""LBMA Fixing Standard Model.""" + +from datetime import date as dateType +from typing import Literal, Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class LbmaFixingQueryParams(QueryParams): + """ + LBMA Fixing Query. + + Source: https://www.lbma.org.uk/prices-and-data/precious-metal-prices#/table + """ + + asset: Literal["gold", "silver"] = Field( + description="The metal to get price fixing rates for.", + default="gold", + ) + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + + +class LbmaFixingData(Data): + """LBMA Fixing Data. 
Historical fixing prices in USD, GBP and EUR.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + usd_am: Optional[float] = Field( + default=None, + description="AM fixing price in USD.", + ) + usd_pm: Optional[float] = Field( + default=None, + description="PM fixing price in USD.", + ) + gbp_am: Optional[float] = Field( + default=None, + description="AM fixing price in GBP.", + ) + gbp_pm: Optional[float] = Field( + default=None, + description="PM fixing price in GBP.", + ) + euro_am: Optional[float] = Field( + default=None, + description="AM fixing price in EUR.", + ) + euro_pm: Optional[float] = Field( + default=None, + description="PM fixing price in EUR.", + ) + usd: Optional[float] = Field( + default=None, + description="Daily fixing price in USD.", + ) + gbp: Optional[float] = Field( + default=None, + description="Daily fixing price in GBP.", + ) + eur: Optional[float] = Field( + default=None, + description="Daily fixing price in EUR.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/long_term_interest_rate.py b/openbb_platform/core/openbb_core/provider/standard_models/long_term_interest_rate.py new file mode 100644 index 0000000000000000000000000000000000000000..494c41466e2e1dae4d566b5dfb4af7ed62566a72 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/long_term_interest_rate.py @@ -0,0 +1,39 @@ +"""Long Term Interest Rates Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class LTIRQueryParams(QueryParams): + """Long Term Interest Rates Query.""" + + start_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("start_date") + ) + end_date: 
Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date") + ) + + +class LTIRData(Data): + """Long Term Interest Rates Data.""" + + date: Optional[dateType] = Field( + default=None, description=DATA_DESCRIPTIONS.get("date") + ) + value: Optional[float] = Field( + default=None, + description="Interest rate (given as a whole number, i.e 10=10%)", + ) + country: Optional[str] = Field( + default=None, + description="Country for which interest rate is given", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/management_discussion_analysis.py b/openbb_platform/core/openbb_core/provider/standard_models/management_discussion_analysis.py new file mode 100644 index 0000000000000000000000000000000000000000..0c4b483cf7df7d4e57d1c6670b2eff4d45e7431f --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/management_discussion_analysis.py @@ -0,0 +1,48 @@ +"""Management Discussion & Analysis Standard Model.""" + +from datetime import date as dateType +from typing import Literal, Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class ManagementDiscussionAnalysisQueryParams(QueryParams): + """Management Discussion & Analysis Query Parameters.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + calendar_year: Optional[int] = Field( + default=None, + description="Calendar year of the report. By default, is the current year." + + " If the calendar period is not provided, but the calendar year is, it will return the annual report.", + ) + calendar_period: Optional[Literal["Q1", "Q2", "Q3", "Q4"]] = Field( + default=None, + description="Calendar period of the report. By default, is the most recent report available for the symbol." 
+ + " If no calendar year and no calendar period are provided, it will return the most recent report.", + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str): + """Convert field to uppercase.""" + return v.upper() + + +class ManagementDiscussionAnalysisData(Data): + """Management Discussion & Analysis Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + calendar_year: int = Field(description="The calendar year of the report.") + calendar_period: int = Field(description="The calendar period of the report.") + period_ending: Optional[dateType] = Field( + description="The end date of the reporting period.", default=None + ) + content: str = Field( + description="The content of the management discussion and analysis." + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/manufacturing_outlook_texas.py b/openbb_platform/core/openbb_core/provider/standard_models/manufacturing_outlook_texas.py new file mode 100644 index 0000000000000000000000000000000000000000..5be3083e961b7623699332f89e5fe18a16210bc2 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/manufacturing_outlook_texas.py @@ -0,0 +1,54 @@ +"""Manufacturing Outlook - Texas - Standard Model.""" + +from datetime import ( + date as dateType, +) +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class ManufacturingOutlookTexasQueryParams(QueryParams): + """Manufacturing Outlook - Texas - Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + + +class 
ManufacturingOutlookTexasData(Data): + """Manufacturing Outlook - Texas - Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + topic: Optional[str] = Field( + default=None, description="Topic of the survey response." + ) + diffusion_index: Optional[float] = Field( + default=None, description="Diffusion Index." + ) + percent_reporting_increase: Optional[float] = Field( + default=None, + description="Percent of respondents reporting an increase over the last month.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + percent_reporting_decrease: Optional[float] = Field( + default=None, + description="Percent of respondents reporting a decrease over the last month.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + percent_reporting_no_change: Optional[float] = Field( + default=None, + description="Percent of respondents reporting no change over the last month.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/market_movers.py b/openbb_platform/core/openbb_core/provider/standard_models/market_movers.py new file mode 100644 index 0000000000000000000000000000000000000000..c230e21b954d163b78443bf8cc1ecf9293fadecf --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/market_movers.py @@ -0,0 +1,24 @@ +"""Market Movers Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS +from pydantic import Field + + +class MarketMoversQueryParams(QueryParams): + """Market Movers Query.""" + + +class MarketMoversData(Data): + """Market Movers Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + name: Optional[str] = Field( + 
class MarketSnapshotsData(Data):
    """Market Snapshots Data.

    One OHLCV record, plus change statistics, for a single listed symbol.
    """

    symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", ""))
    # Session OHLCV — optional because not every venue reports a full session.
    open: Optional[float] = Field(
        default=None, description=DATA_DESCRIPTIONS.get("open", "")
    )
    high: Optional[float] = Field(
        default=None, description=DATA_DESCRIPTIONS.get("high", "")
    )
    low: Optional[float] = Field(
        default=None, description=DATA_DESCRIPTIONS.get("low", "")
    )
    close: Optional[float] = Field(
        default=None, description=DATA_DESCRIPTIONS.get("close", "")
    )
    volume: Optional[ForceInt] = Field(
        default=None, description=DATA_DESCRIPTIONS.get("volume", "")
    )
    # Change statistics relative to the previous close.
    prev_close: Optional[float] = Field(
        default=None, description=DATA_DESCRIPTIONS.get("prev_close", "")
    )
    change: Optional[float] = Field(
        default=None,
        description="The change in price from the previous close.",
    )
    change_percent: Optional[float] = Field(
        default=None,
        description="The change in price from the previous close, as a normalized percent.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
class MoneyMeasuresData(Data):
    """Money Measures Data.

    A monthly observation of the M1/M2 money stock with its component series.
    """

    month: dateType = Field(description=DATA_DESCRIPTIONS.get("date", ""))
    # Headline aggregates (always reported).
    M1: float = Field(description="Value of the M1 money supply in billions.")
    M2: float = Field(description="Value of the M2 money supply in billions.")
    # Component series — optional, as not every report breaks them out.
    currency: Optional[float] = Field(
        default=None,
        description="Value of currency in circulation in billions.",
    )
    demand_deposits: Optional[float] = Field(
        default=None,
        description="Value of demand deposits in billions.",
    )
    retail_money_market_funds: Optional[float] = Field(
        default=None,
        description="Value of retail money market funds in billions.",
    )
    other_liquid_deposits: Optional[float] = Field(
        default=None,
        description="Value of other liquid deposits in billions.",
    )
    small_denomination_time_deposits: Optional[float] = Field(
        default=None,
        description="Value of small denomination time deposits in billions.",
    )
class MoodyCorporateBondIndexData(Data):
    """Moody Corporate Bond Index Data."""

    date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", ""))
    # default=None added: in Pydantic v2 an Optional annotation alone does not
    # make a field optional — without a default, a provider omitting `rate`
    # would fail validation. This matches the convention of sibling models.
    rate: Optional[float] = Field(
        default=None, description="Moody Corporate Bond Index Rate."
    )
class NonFarmPayrollsData(Data):
    """NonFarm Payrolls Data.

    A single observation from a non-farm payrolls report.
    """

    # Report date of the observation.
    date: dateType = Field(
        description=DATA_DESCRIPTIONS.get("date", "")
    )
    # Series identifier for the observation.
    symbol: str = Field(
        description=DATA_DESCRIPTIONS.get("symbol", "")
    )
    # Reported value of the series.
    value: float = Field(
        description=DATA_DESCRIPTIONS.get("value", "")
    )
class OptionsChainsQueryParams(QueryParams):
    """Options Chains Query."""

    symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", ""))

    @field_validator("symbol", mode="before", check_fields=False)
    @classmethod
    def to_upper(cls, v: str) -> str:
        """Normalize the ticker symbol to uppercase before validation."""
        upper_symbol = v.upper()
        return upper_symbol
+ Both, "expiration" and "strike", contain a list of records with fields: Calls, Puts, Total, Net Percent, PCR. + total_gex: Dict + Return Gamma Exposure (GEX), if available, as a nested dictionary with keys: total, expiration, strike. + Both, "expiration" and "strike", contain a list of records with fields: Calls, Puts, Total, Net Percent, PCR. + last_price: float + Manually set the underlying price by assigning a float value to this property. + Certain provider/symbol combinations may not return the underlying price, + and it may be necessary, or desirable, to set it post-initialization. + This property can be used to override the underlying price returned by the provider. + It is not set automatically, and this property will return None if it is not set. + + Methods + ------- + filter_data( + date: Optional[Union[str, int]] = None, + column: Optional[str] = None, + option_type: Optional[Literal["call", "put"]] = None, + moneyness: Optional[Literal["otm", "itm"]] = None, + value_min: Optional[float] = None, + value_max: Optional[float] = None, + stat: Optional[Literal["open_interest", "volume", "dex", "gex"]] = None, + by: Literal["expiration", "strike"] = "expiration", + ) -> DataFrame: + Return statistics by strike or expiration; or, the filtered chains data. + skew( + date: Optional[Union[int, str]] = None, underlying_price: Optional[float] = None) + -> DataFrame: + Return skewness of the options, either vertical or horizontal, by nearest DTE. + straddle( + days: Optional[int] = None, strike: Optional[float] = None, underlying_price: Optional[float] = None + ) -> DataFrame: + Calculates the cost of a straddle, by nearest DTE. Use a negative strike price for short options. + strangle( + days: Optional[int] = None, moneyness: Optional[float] = None, underlying_price: Optional[float] = None + ) -> DataFrame: + Calculates the cost of a strangle, by nearest DTE and % moneyness. + Use a negative value for moneyness for short options. 
+ synthetic_long( + days: Optional[int] = None, strike: Optional[float] = None, underlying_price: Optional[float] = None + ) -> DataFrame: + Calculates the cost of a synthetic long position, by nearest DTE and strike price. + synthetic_short( + days: Optional[int] = None, strike: Optional[float] = None, underlying_price: Optional[float] = None + ) -> DataFrame: + Calculates the cost of a synthetic short position, by nearest DTE and strike price. + vertical_call( + days: Optional[int] = None, sold: Optional[float] = None, bought: Optional[float] = None, + underlying_price: Optional[float] = None + ) -> DataFrame: + Calculates the cost of a vertical call spread, by nearest DTE and strike price to sold and bought levels. + vertical_put( + days: Optional[int] = None, sold: Optional[float] = None, bought: Optional[float] = None, + underlying_price: Optional[float] = None + ) -> DataFrame: + Calculates the cost of a vertical put spread, by nearest DTE and strike price to sold and bought levels. + strategies( + days: Optional[int] = None, + straddle_strike: Optional[float] = None, + strangle_moneyness: Optional[List[float]] = None, + synthetic_longs: Optional[List[float]] = None, + synthetic_shorts: Optional[List[float]] = None, + vertical_calls: Optional[List[tuple]] = None, + vertical_puts: Optional[List[tuple]] = None, + underlying_price: Optional[float] = None, + ) -> DataFrame: + Method for combining multiple strategies and parameters in a single DataFrame. + To get all expirations, set days to -1. + + Raises + ------ + OpenBBError + OpenBBError will raise when accessing properties and methods if required, specific, data was not found. 
+ """ + + underlying_symbol: List[Union[str, None]] = Field( + default_factory=list, + description="Underlying symbol for the option.", + ) + underlying_price: List[Union[float, None]] = Field( + default_factory=list, + description="Price of the underlying stock.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + contract_symbol: List[str] = Field(description="Contract symbol for the option.") + eod_date: List[Union[dateType, None]] = Field( + default_factory=list, + description="Date for which the options chains are returned.", + ) + expiration: List[dateType] = Field(description="Expiration date of the contract.") + dte: List[Union[int, None]] = Field( + default_factory=list, description="Days to expiration of the contract." + ) + strike: List[float] = Field( + description="Strike price of the contract.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + option_type: List[str] = Field(description="Call or Put.") + contract_size: List[Union[int, float, None]] = Field( + default_factory=list, description="Number of underlying units per contract." + ) + open_interest: List[Union[int, float, None]] = Field( + default_factory=list, description="Open interest on the contract." + ) + volume: List[Union[int, float, None]] = Field( + default_factory=list, description=DATA_DESCRIPTIONS.get("volume", "") + ) + theoretical_price: List[Union[float, None]] = Field( + default_factory=list, + description="Theoretical value of the option.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + last_trade_price: List[Union[float, None]] = Field( + default_factory=list, + description="Last trade price of the option.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + last_trade_size: List[Union[int, float, None]] = Field( + default_factory=list, description="Last trade size of the option." 
+ ) + last_trade_time: List[Union[datetime, None]] = Field( + default_factory=list, + description="The timestamp of the last trade.", + ) + tick: List[Union[str, None]] = Field( + default_factory=list, + description="Whether the last tick was up or down in price.", + ) + bid: List[Union[float, None]] = Field( + default_factory=list, + description="Current bid price for the option.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + bid_size: List[Union[int, float, None]] = Field( + default_factory=list, description="Bid size for the option." + ) + bid_time: List[Union[datetime, None]] = Field( + default_factory=list, + description="The timestamp of the bid price.", + ) + bid_exchange: List[Union[str, None]] = Field( + default_factory=list, description="The exchange of the bid price." + ) + ask: List[Union[float, None]] = Field( + default_factory=list, + description="Current ask price for the option.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + ask_size: List[Union[int, float, None]] = Field( + default_factory=list, description="Ask size for the option." + ) + ask_time: List[Union[datetime, None]] = Field( + default_factory=list, + description="The timestamp of the ask price.", + ) + ask_exchange: List[Union[str, None]] = Field( + default_factory=list, description="The exchange of the ask price." 
+ ) + mark: List[Union[float, None]] = Field( + default_factory=list, + description="The mid-price between the latest bid and ask.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + open: List[Union[float, None]] = Field( + default_factory=list, + description=DATA_DESCRIPTIONS.get("open", ""), + json_schema_extra={"x-unit_measurement": "currency"}, + ) + open_bid: List[Union[float, None]] = Field( + default_factory=list, + description="The opening bid price for the option that day.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + open_ask: List[Union[float, None]] = Field( + default_factory=list, + description="The opening ask price for the option that day.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + high: List[Union[float, None]] = Field( + default_factory=list, + description=DATA_DESCRIPTIONS.get("high", ""), + json_schema_extra={"x-unit_measurement": "currency"}, + ) + bid_high: List[Union[float, None]] = Field( + default_factory=list, + description="The highest bid price for the option that day.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + ask_high: List[Union[float, None]] = Field( + default_factory=list, + description="The highest ask price for the option that day.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + low: List[Union[float, None]] = Field( + default_factory=list, + description=DATA_DESCRIPTIONS.get("low", ""), + json_schema_extra={"x-unit_measurement": "currency"}, + ) + bid_low: List[Union[float, None]] = Field( + default_factory=list, + description="The lowest bid price for the option that day.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + ask_low: List[Union[float, None]] = Field( + default_factory=list, + description="The lowest ask price for the option that day.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + close: List[Union[float, None]] = Field( + default_factory=list, + description=DATA_DESCRIPTIONS.get("close", ""), + 
json_schema_extra={"x-unit_measurement": "currency"}, + ) + close_size: List[Union[int, float, None]] = Field( + default_factory=list, + description="The closing trade size for the option that day.", + ) + close_time: List[Union[datetime, None]] = Field( + default_factory=list, + description="The time of the closing price for the option that day.", + ) + close_bid: List[Union[float, None]] = Field( + default_factory=list, + description="The closing bid price for the option that day.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + close_bid_size: List[Union[int, float, None]] = Field( + default_factory=list, + description="The closing bid size for the option that day.", + ) + close_bid_time: List[Union[datetime, None]] = Field( + default_factory=list, + description="The time of the bid closing price for the option that day.", + ) + close_ask: List[Union[float, None]] = Field( + default_factory=list, + description="The closing ask price for the option that day.", + ) + close_ask_size: List[Union[int, float, None]] = Field( + default_factory=list, + description="The closing ask size for the option that day.", + ) + close_ask_time: List[Union[datetime, None]] = Field( + default_factory=list, + description="The time of the ask closing price for the option that day.", + ) + prev_close: List[Union[float, None]] = Field( + default_factory=list, + description=DATA_DESCRIPTIONS.get("prev_close", ""), + json_schema_extra={"x-unit_measurement": "currency"}, + ) + change: List[Union[float, None]] = Field( + default_factory=list, description="The change in the price of the option." 
+ ) + change_percent: List[Union[float, None]] = Field( + default_factory=list, + description="Change, in normalized percentage points, of the option.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + implied_volatility: List[Union[float, None]] = Field( + default_factory=list, + description="Implied volatility of the option.", + json_schema_extra={"x-unit_measurement": "decimal"}, + ) + delta: List[Union[float, None]] = Field( + default_factory=list, + description="Delta of the option.", + json_schema_extra={"x-unit_measurement": "decimal"}, + ) + gamma: List[Union[float, None]] = Field( + default_factory=list, + description="Gamma of the option.", + json_schema_extra={"x-unit_measurement": "decimal"}, + ) + theta: List[Union[float, None]] = Field( + default_factory=list, + description="Theta of the option.", + json_schema_extra={"x-unit_measurement": "decimal"}, + ) + vega: List[Union[float, None]] = Field( + default_factory=list, + description="Vega of the option.", + json_schema_extra={"x-unit_measurement": "decimal"}, + ) + rho: List[Union[float, None]] = Field( + default_factory=list, + description="Rho of the option.", + json_schema_extra={"x-unit_measurement": "decimal"}, + ) + + @field_validator("expiration", mode="before", check_fields=False) + @classmethod + def _date_validate(cls, v): + """Return the datetime object from the date string.""" + if isinstance(v[0], datetime): + return [datetime.strftime(d, "%Y-%m-%d") if d else None for d in v] + if isinstance(v[0], str): + return [datetime.strptime(d, "%Y-%m-%d") if d else None for d in v] + return v + + @model_serializer + def model_serialize(self): + """Return the serialized data.""" + data: dict = {} + for field in self.model_fields: + value = getattr(self, field) + if isinstance(value, list): + if value: # Check if the list is not empty + if isinstance(value[0], datetime): + data[field] = [str(v) if v else None for v in value] + else: + data[field] = value + 
class OptionsSnapshotsData(Data):
    """Options Snapshots Data.

    Columnar snapshot of listed options: each field holds one list with an
    entry per contract.
    """

    # Contract identity.
    underlying_symbol: List[str] = Field(
        description="Ticker symbol of the underlying asset."
    )
    contract_symbol: List[str] = Field(description="Symbol of the options contract.")
    expiration: List[dateType] = Field(
        description="Expiration date of the options contract."
    )
    dte: List[Union[int, None]] = Field(
        default_factory=list,
        description="Number of days to expiration of the options contract.",
    )
    strike: List[float] = Field(
        description="Strike price of the options contract.",
        json_schema_extra={"x-unit_measurement": "currency"},
    )
    option_type: List[str] = Field(description="The type of option.")
    # Activity statistics.
    volume: List[Union[int, None]] = Field(
        default_factory=list, description=DATA_DESCRIPTIONS.get("volume", "")
    )
    open_interest: List[Union[int, None]] = Field(
        default_factory=list, description="Open interest at the time."
    )
    # Last-trade details.
    last_price: List[Union[float, None]] = Field(
        default_factory=list,
        description="Last trade price at the time.",
        json_schema_extra={"x-unit_measurement": "currency"},
    )
    last_size: List[Union[int, None]] = Field(
        default_factory=list, description="Lot size of the last trade."
    )
    last_timestamp: List[Union[datetime, None]] = Field(
        default_factory=list, description="Timestamp of the last price."
    )
    # Session OHLC.
    open: List[Union[float, None]] = Field(
        default_factory=list,
        description=DATA_DESCRIPTIONS.get("open", ""),
        json_schema_extra={"x-unit_measurement": "currency"},
    )
    high: List[Union[float, None]] = Field(
        default_factory=list,
        description=DATA_DESCRIPTIONS.get("high", ""),
        json_schema_extra={"x-unit_measurement": "currency"},
    )
    low: List[Union[float, None]] = Field(
        default_factory=list,
        description=DATA_DESCRIPTIONS.get("low", ""),
        json_schema_extra={"x-unit_measurement": "currency"},
    )
    close: List[Union[float, None]] = Field(
        default_factory=list,
        description=DATA_DESCRIPTIONS.get("close", ""),
        json_schema_extra={"x-unit_measurement": "currency"},
    )
class OptionsUnusualQueryParams(QueryParams):
    """Unusual Options Query."""

    symbol: Optional[str] = Field(
        default=None,
        description=QUERY_DESCRIPTIONS.get("symbol", "") + " (the underlying symbol)",
    )

    @field_validator("symbol", mode="before", check_fields=False)
    @classmethod
    def to_upper(cls, v: str):
        """Convert field to uppercase."""
        # Treat a missing/empty symbol as None; otherwise normalize case.
        if not v:
            return None
        return v.upper()
class OvernightBankFundingRateData(Data):
    """Overnight Bank Funding Rate Data."""

    date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", ""))
    rate: float = Field(
        description="Overnight Bank Funding Rate.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    # Distribution percentiles of the reported rate.
    percentile_1: Optional[float] = Field(
        default=None,
        description="1st percentile of the distribution.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    percentile_25: Optional[float] = Field(
        default=None,
        description="25th percentile of the distribution.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    percentile_75: Optional[float] = Field(
        default=None,
        description="75th percentile of the distribution.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    percentile_99: Optional[float] = Field(
        default=None,
        description="99th percentile of the distribution.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    volume: Optional[float] = Field(
        default=None,
        # Fix: a separating space is required so the base description and the
        # qualifier do not run together ("...volume.The notional...").
        description=DATA_DESCRIPTIONS.get("volume", "")
        + " The notional volume of transactions (Billions of $).",
        json_schema_extra={
            "x-unit_measurement": "currency",
            "x-frontend_multiply": 1e9,
        },
    )
class PetroleumStatusReportData(Data):
    """Petroleum Status Report Data."""

    date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", ""))
    # default=None added: in Pydantic v2, Optional alone does not make a field
    # optional — without a default, `table` was still required despite the
    # annotation; siblings `order`, `title`, and `unit` all carry defaults.
    table: Optional[str] = Field(default=None, description="Table name for the data.")
    symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", ""))
    order: Optional[int] = Field(
        default=None, description="Presented order of the data, relative to the table."
    )
    title: Optional[str] = Field(default=None, description="Title of the data.")
    value: Union[int, float] = Field(description="Value of the data.")
    unit: Optional[str] = Field(default=None, description="Unit or scale of the data.")
class PriceTargetQueryParams(QueryParams):
    """Query parameters for requesting analyst price targets."""

    symbol: Optional[str] = Field(
        default=None,
        description=QUERY_DESCRIPTIONS.get("symbol", ""),
    )
    limit: NonNegativeInt = Field(
        default=200,
        description=QUERY_DESCRIPTIONS.get("limit", ""),
    )

    @field_validator("symbol", mode="before", check_fields=False)
    @classmethod
    def to_upper(cls, v: str):
        """Normalize the ticker to uppercase; falsy input becomes None."""
        if not v:
            return None
        return v.upper()
+ ) + analyst_name: Optional[str] = Field(default=None, description="Analyst name.") + analyst_firm: Optional[str] = Field( + default=None, + description="Name of the analyst firm that published the price target.", + ) + currency: Optional[str] = Field( + default=None, description="Currency the data is denominated in." + ) + price_target: Optional[float] = Field( + default=None, description="The current price target." + ) + adj_price_target: Optional[float] = Field( + default=None, + description="Adjusted price target for splits and stock dividends.", + ) + price_target_previous: Optional[float] = Field( + default=None, description="Previous price target." + ) + previous_adj_price_target: Optional[float] = Field( + default=None, description="Previous adjusted price target." + ) + price_when_posted: Optional[float] = Field( + default=None, description="Price when posted." + ) + rating_current: Optional[str] = Field( + default=None, description="The analyst's rating for the company." + ) + rating_previous: Optional[str] = Field( + default=None, description="Previous analyst rating for the company." 
class PriceTargetConsensusQueryParams(QueryParams):
    """Query parameters for the price target consensus endpoint."""

    symbol: Optional[str] = Field(
        default=None,
        description=QUERY_DESCRIPTIONS.get("symbol", ""),
    )

    @field_validator("symbol", mode="before", check_fields=False)
    @classmethod
    def to_upper(cls, v):
        """Normalize the ticker to uppercase; falsy input becomes None."""
        if not v:
            return None
        return v.upper()
"""Primary Dealer Fails Standard Model."""

from datetime import (
    date as dateType,
)
from typing import Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import (
    DATA_DESCRIPTIONS,
    QUERY_DESCRIPTIONS,
)
from pydantic import Field

# Fix: module docstring typo "Primray" -> "Primary".


class PrimaryDealerFailsQueryParams(QueryParams):
    """Primary Dealer Fails Query."""

    start_date: Optional[dateType] = Field(
        default=None, description=QUERY_DESCRIPTIONS.get("start_date", "")
    )
    end_date: Optional[dateType] = Field(
        default=None, description=QUERY_DESCRIPTIONS.get("end_date", "")
    )


class PrimaryDealerFailsData(Data):
    """Primary Dealer Fails Data."""

    date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", ""))
    symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", ""))
"""Primary Dealer Positioning Standard Model."""

from datetime import (
    date as dateType,
)
from typing import Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import (
    DATA_DESCRIPTIONS,
    QUERY_DESCRIPTIONS,
)
from pydantic import Field

# Fix: module docstring typo "Primray" -> "Primary".


class PrimaryDealerPositioningQueryParams(QueryParams):
    """Primary Dealer Positioning Query."""

    start_date: Optional[dateType] = Field(
        default=None, description=QUERY_DESCRIPTIONS.get("start_date", "")
    )
    end_date: Optional[dateType] = Field(
        default=None, description=QUERY_DESCRIPTIONS.get("end_date", "")
    )


class PrimaryDealerPositioningData(Data):
    """Primary Dealer Positioning Data."""

    date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", ""))
    symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", ""))
class RecentPerformanceData(Data):
    """Recent Performance Data. All returns are normalized percents.

    Every return field carries the same json_schema_extra metadata so
    front-ends render the decimal value as a percentage (multiply by 100).
    """

    symbol: Optional[str] = Field(
        default=None, description=DATA_DESCRIPTIONS.get("symbol", "")
    )
    one_day: Optional[float] = Field(
        default=None,
        description="One-day return.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    wtd: Optional[float] = Field(
        default=None,
        description="Week to date return.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    one_week: Optional[float] = Field(
        default=None,
        description="One-week return.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    mtd: Optional[float] = Field(
        default=None,
        description="Month to date return.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    one_month: Optional[float] = Field(
        default=None,
        description="One-month return.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    qtd: Optional[float] = Field(
        default=None,
        description="Quarter to date return.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    three_month: Optional[float] = Field(
        default=None,
        description="Three-month return.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    six_month: Optional[float] = Field(
        default=None,
        description="Six-month return.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    ytd: Optional[float] = Field(
        default=None,
        description="Year to date return.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    one_year: Optional[float] = Field(
        default=None,
        description="One-year return.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    two_year: Optional[float] = Field(
        default=None,
        description="Two-year return.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    three_year: Optional[float] = Field(
        default=None,
        description="Three-year return.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    # Fix: description was the truncated "Four-year"; completed to match
    # the phrasing of every sibling return field.
    four_year: Optional[float] = Field(
        default=None,
        description="Four-year return.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    five_year: Optional[float] = Field(
        default=None,
        description="Five-year return.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    ten_year: Optional[float] = Field(
        default=None,
        description="Ten-year return.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
    max: Optional[float] = Field(
        default=None,
        description="Return from the beginning of the time series.",
        json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100},
    )
Financials Query Params.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + period: str = Field( + default="annual", description=QUERY_DESCRIPTIONS.get("period", "") + ) + statement_type: str = Field( + default="balance", + description="The type of financial statement - i.e, balance, income, cash.", + ) + limit: Optional[int] = Field( + default=100, + description=( + QUERY_DESCRIPTIONS.get("limit", "") + + " Although the response object contains multiple results," + + " because of the variance in the fields, year-to-year and quarter-to-quarter," + + " it is recommended to view results in small chunks." + ), + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str): + """Convert field to uppercase.""" + return v.upper() + + @field_validator("period", "statement_type", mode="before", check_fields=False) + @classmethod + def to_lower(cls, v: Optional[str]) -> Optional[str]: + """Convert field to lowercase.""" + return v.lower() if v else v + + +class ReportedFinancialsData(Data): + """Reported Financials Data.""" + + period_ending: dateType = Field( + description="The ending date of the reporting period." + ) + fiscal_period: str = Field( + description="The fiscal period of the report (e.g. FY, Q1, etc.)." 
"""Retail Prices Standard Model."""

from datetime import date as dateType
from typing import Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import (
    DATA_DESCRIPTIONS,
    QUERY_DESCRIPTIONS,
)
from pydantic import Field


class RetailPricesQueryParams(QueryParams):
    """Retail Prices Query."""

    item: Optional[str] = Field(
        default=None,
        description="The item or basket of items to query.",
    )
    country: str = Field(
        description=QUERY_DESCRIPTIONS.get("country", ""),
        default="united_states",
    )
    start_date: Optional[dateType] = Field(
        default=None, description=QUERY_DESCRIPTIONS.get("start_date")
    )
    end_date: Optional[dateType] = Field(
        default=None, description=QUERY_DESCRIPTIONS.get("end_date")
    )


class RetailPricesData(Data):
    """Retail Prices Data."""

    date: Optional[dateType] = Field(
        default=None, description=DATA_DESCRIPTIONS.get("date")
    )
    symbol: Optional[str] = Field(
        default=None,
        description=DATA_DESCRIPTIONS.get("symbol", ""),
    )
    country: Optional[str] = Field(
        default=None,
        description=DATA_DESCRIPTIONS.get("country", ""),
    )
    # Fix: annotation was plain `str` with `default=None`, an inconsistent
    # non-Optional field holding a None default; made Optional[str] to match
    # the declared default and the other nullable fields of this model.
    description: Optional[str] = Field(
        default=None,
        description="Description of the item.",
    )
    value: Optional[float] = Field(
        default=None,
        description="Price, or change in price, per unit.",
    )
+ ) + business_line: Optional[str] = Field( + default=None, + description="The business line represented by the revenue data.", + ) + revenue: Union[int, float] = Field( + description="The total revenue attributed to the business line.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/revenue_geographic.py b/openbb_platform/core/openbb_core/provider/standard_models/revenue_geographic.py new file mode 100644 index 0000000000000000000000000000000000000000..1684bbc506fc8b3dcf9e89c564aec2643d2bc13d --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/revenue_geographic.py @@ -0,0 +1,44 @@ +"""Revenue by Geographic Segments Standard Model.""" + +from datetime import date as dateType +from typing import Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import QUERY_DESCRIPTIONS +from pydantic import Field, field_validator + + +class RevenueGeographicQueryParams(QueryParams): + """Revenue by Geographic Segments Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str): + """Convert field to uppercase.""" + return v.upper() + + +class RevenueGeographicData(Data): + """Revenue by Geographic Segments Data.""" + + period_ending: dateType = Field(description="The end date of the reporting period.") + fiscal_period: Optional[str] = Field( + default=None, description="The fiscal period of the reporting period." + ) + fiscal_year: Optional[int] = Field( + default=None, description="The fiscal year of the reporting period." + ) + filing_date: Optional[dateType] = Field( + default=None, description="The filing date of the report." 
"""Risk Premium Standard Model."""

from typing import Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from pydantic import Field, NonNegativeFloat, PositiveFloat


class RiskPremiumQueryParams(QueryParams):
    """Query parameters for risk premium data (no filters accepted)."""


class RiskPremiumData(Data):
    """One row of equity/country risk premium data for a single market."""

    country: str = Field(description="Market country.")
    continent: Optional[str] = Field(
        description="Continent of the country.",
        default=None,
    )
    total_equity_risk_premium: Optional[PositiveFloat] = Field(
        description="Total equity risk premium for the country.",
        default=None,
    )
    country_risk_premium: Optional[NonNegativeFloat] = Field(
        description="Country-specific risk premium.",
        default=None,
    )
+ ) + unit: Optional[str] = Field( + default=None, description="Unit of the financial attribute." + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/search_financial_attributes.py b/openbb_platform/core/openbb_core/provider/standard_models/search_financial_attributes.py new file mode 100644 index 0000000000000000000000000000000000000000..62f6ffef63e3683eee129ffc479237bab009a446 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/search_financial_attributes.py @@ -0,0 +1,48 @@ +"""Search Financial Attributes Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import QUERY_DESCRIPTIONS +from pydantic import Field + + +class SearchFinancialAttributesQueryParams(QueryParams): + """Search Financial Attributes Query.""" + + query: str = Field(description="Query to search for.") + limit: Optional[int] = Field( + default=1000, description=QUERY_DESCRIPTIONS.get("limit") + ) + + +class SearchFinancialAttributesData(Data): + """Search Financial Attributes Data.""" + + id: str = Field(description="ID of the financial attribute.") + name: str = Field(description="Name of the financial attribute.") + tag: str = Field(description="Tag of the financial attribute.") + statement_code: str = Field(description="Code of the financial statement.") + statement_type: Optional[str] = Field( + default=None, description="Type of the financial statement." + ) + parent_name: Optional[str] = Field( + default=None, description="Parent's name of the financial attribute." + ) + sequence: Optional[int] = Field( + default=None, description="Sequence of the financial statement." + ) + factor: Optional[str] = Field( + default=None, description="Unit of the financial attribute." 
+ ) + transaction: Optional[str] = Field( + default=None, + description="Transaction type (credit/debit) of the financial attribute.", + ) + type: Optional[str] = Field( + default=None, description="Type of the financial attribute." + ) + unit: Optional[str] = Field( + default=None, description="Unit of the financial attribute." + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/sector_pe.py b/openbb_platform/core/openbb_core/provider/standard_models/sector_pe.py new file mode 100644 index 0000000000000000000000000000000000000000..23fef084c7d445ee313735d380060be9069db0e2 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/sector_pe.py @@ -0,0 +1,26 @@ +"""Sector P/E Ratio Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import DATA_DESCRIPTIONS +from pydantic import Field + + +class SectorPEQueryParams(QueryParams): + """Sector P/E Ratio Query.""" + + +class SectorPEData(Data): + """Sector P/E Ratio Data.""" + + date: Optional[dateType] = Field( + description=DATA_DESCRIPTIONS.get("date", ""), default=None + ) + exchange: Optional[str] = Field( + default=None, description="The exchange where the data is from." 
"""Senior Loan Officer Opinion Survey Standard Model."""

from datetime import (
    date as dateType,
)
from typing import Optional

from openbb_core.provider.abstract.data import Data
from openbb_core.provider.abstract.query_params import QueryParams
from openbb_core.provider.utils.descriptions import (
    DATA_DESCRIPTIONS,
    QUERY_DESCRIPTIONS,
)
from pydantic import Field


class SeniorLoanOfficerSurveyQueryParams(QueryParams):
    """Senior Loan Officer Opinion Survey Query."""

    start_date: Optional[dateType] = Field(
        default=None,
        description=QUERY_DESCRIPTIONS.get("start_date", ""),
    )
    end_date: Optional[dateType] = Field(
        default=None,
        description=QUERY_DESCRIPTIONS.get("end_date", ""),
    )


class SeniorLoanOfficerSurveyData(Data):
    """Senior Loan Officer Opinion Survey Data."""

    date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", ""))
    symbol: Optional[str] = Field(
        default=None, description=DATA_DESCRIPTIONS.get("symbol", "")
    )
    value: float = Field(description="Survey value.")
    # Fix: `title` is nullable but had no default, which makes an Optional
    # field required in pydantic v2; added `default=None` for consistency
    # with the `symbol` field above.
    title: Optional[str] = Field(default=None, description="Survey title.")
SharePriceIndexData(Data): + """Share Price Index Data.""" + + date: Optional[dateType] = Field( + default=None, description=DATA_DESCRIPTIONS.get("date") + ) + country: Optional[str] = Field( + default=None, + description=DATA_DESCRIPTIONS.get("country", ""), + ) + value: Optional[float] = Field( + default=None, + description="Share price index value.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/share_statistics.py b/openbb_platform/core/openbb_core/provider/standard_models/share_statistics.py new file mode 100644 index 0000000000000000000000000000000000000000..10eca8e12755a5ef86a9a78cdbad33622c9580c0 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/share_statistics.py @@ -0,0 +1,55 @@ +"""Share Statistics Standard Model.""" + +from datetime import date as dateType +from typing import List, Optional, Set, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class ShareStatisticsQueryParams(QueryParams): + """Share Statistics Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: str) -> str: + """Convert field to uppercase.""" + return v.upper() + + +class ShareStatisticsData(Data): + """Share Statistics Data.""" + + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + date: Optional[dateType] = Field( + default=None, description=DATA_DESCRIPTIONS.get("date", "") + ) + free_float: Optional[float] = Field( + default=None, + description="Percentage of unrestricted shares of a publicly-traded company.", + ) + float_shares: Optional[float] = Field( + default=None, + description="Number of shares available for trading by the general 
public.", + ) + outstanding_shares: Optional[float] = Field( + default=None, description="Total number of shares of a publicly-traded company." + ) + source: Optional[str] = Field( + default=None, description="Source of the received data." + ) + + @field_validator("symbol", mode="before", check_fields=False) + @classmethod + def to_upper(cls, v: Union[str, List[str], Set[str]]): + """Convert field to uppercase.""" + if isinstance(v, str): + return v.upper() + return ",".join([symbol.upper() for symbol in list(v)]) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/short_term_energy_outlook.py b/openbb_platform/core/openbb_core/provider/standard_models/short_term_energy_outlook.py new file mode 100644 index 0000000000000000000000000000000000000000..06734a146d3a48906c095cf8db8018722f752348 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/short_term_energy_outlook.py @@ -0,0 +1,39 @@ +"""Short Term Energy Outlook Standard Model.""" + +from datetime import date as dateType +from typing import Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class ShortTermEnergyOutlookQueryParams(QueryParams): + """Short Term Energy Outlook Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + + +class ShortTermEnergyOutlookData(Data): + """Short Term Energy Outlook Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + table: Optional[str] = Field(default=None, description="Table name for the data.") + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + order: Optional[int] = 
Field( + default=None, description="Presented order of the data, relative to the table." + ) + title: Optional[str] = Field(default=None, description="Title of the data.") + value: Union[int, float] = Field(description="Value of the data.") + unit: Optional[str] = Field(default=None, description="Unit or scale of the data.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/short_term_interest_rate.py b/openbb_platform/core/openbb_core/provider/standard_models/short_term_interest_rate.py new file mode 100644 index 0000000000000000000000000000000000000000..057584597f03e9ce0f2ea3d98d6e813d74a7d695 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/short_term_interest_rate.py @@ -0,0 +1,39 @@ +"""Short Term Interest Rates Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class STIRQueryParams(QueryParams): + """Short Term Interest Rates Query.""" + + start_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("start_date") + ) + end_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date") + ) + + +class STIRData(Data): + """Short Term Interest Rates Data.""" + + date: Optional[dateType] = Field( + default=None, description=DATA_DESCRIPTIONS.get("date") + ) + value: Optional[float] = Field( + default=None, + description="Interest rate (given as a whole number, i.e 10=10%)", + ) + country: Optional[str] = Field( + default=None, + description="Country for which interest rate is given", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/short_volume.py b/openbb_platform/core/openbb_core/provider/standard_models/short_volume.py new file mode 
100644 index 0000000000000000000000000000000000000000..8098852147328f8f081130280f8a843251550586 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/short_volume.py @@ -0,0 +1,49 @@ +"""Short Volume Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class ShortVolumeQueryParams(QueryParams): + """Short Volume Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol")) + + +class ShortVolumeData(Data): + """Short Volume Data.""" + + date: Optional[dateType] = Field( + default=None, description=DATA_DESCRIPTIONS.get("date") + ) + + market: Optional[str] = Field( + default=None, + description="Reporting Facility ID. N=NYSE TRF, Q=NASDAQ TRF Carteret, B=NASDAQ TRY Chicago, D=FINRA ADF", + ) + + short_volume: Optional[int] = Field( + default=None, + description=( + "Aggregate reported share volume of executed short sale " + "and short sale exempt trades during regular trading hours" + ), + ) + + short_exempt_volume: Optional[int] = Field( + default=None, + description="Aggregate reported share volume of executed short sale exempt trades during regular trading hours", + ) + + total_volume: Optional[int] = Field( + default=None, + description="Aggregate reported share volume of executed trades during regular trading hours", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/sofr.py b/openbb_platform/core/openbb_core/provider/standard_models/sofr.py new file mode 100644 index 0000000000000000000000000000000000000000..e8786e9e84183d92caac0e31d233edb524780134 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/sofr.py @@ -0,0 +1,64 @@ +"""Secured Overnight Financing Rate Standard Model.""" + 
+from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class SOFRQueryParams(QueryParams): + """Secured Overnight Financing Rate Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + + +class SOFRData(Data): + """SOFR Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + rate: float = Field( + description="Effective federal funds rate.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + percentile_1: Optional[float] = Field( + default=None, + description="1st percentile of the distribution.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + percentile_25: Optional[float] = Field( + default=None, + description="25th percentile of the distribution.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + percentile_75: Optional[float] = Field( + default=None, + description="75th percentile of the distribution.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + percentile_99: Optional[float] = Field( + default=None, + description="99th percentile of the distribution.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + volume: Optional[float] = Field( + default=None, + description=DATA_DESCRIPTIONS.get("volume", "") + + "The notional volume of transactions (Billions of $).", + json_schema_extra={ + "x-unit_measurement": "currency", + "x-frontend_multiply": 1e9, + }, + ) diff --git 
a/openbb_platform/core/openbb_core/provider/standard_models/sonia_rates.py b/openbb_platform/core/openbb_core/provider/standard_models/sonia_rates.py new file mode 100644 index 0000000000000000000000000000000000000000..14fc2264079e03f286d5eb6cd0b551b08892b95d --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/sonia_rates.py @@ -0,0 +1,32 @@ +"""SONIA Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class SONIAQueryParams(QueryParams): + """SONIA Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + + +class SONIAData(Data): + """SONIA Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + rate: Optional[float] = Field(description="SONIA rate.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/sp500_multiples.py b/openbb_platform/core/openbb_core/provider/standard_models/sp500_multiples.py new file mode 100644 index 0000000000000000000000000000000000000000..2fb390a8f5d88a5237d9425d98c5aec3cc57a948 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/sp500_multiples.py @@ -0,0 +1,78 @@ +"""SP500 Multiples Standard Model.""" + +from datetime import date as dateType +from typing import Literal, Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + +SERIES_NAME = Literal[ + 
"shiller_pe_month", + "shiller_pe_year", + "pe_year", + "pe_month", + "dividend_year", + "dividend_month", + "dividend_growth_quarter", + "dividend_growth_year", + "dividend_yield_year", + "dividend_yield_month", + "earnings_year", + "earnings_month", + "earnings_growth_year", + "earnings_growth_quarter", + "real_earnings_growth_year", + "real_earnings_growth_quarter", + "earnings_yield_year", + "earnings_yield_month", + "real_price_year", + "real_price_month", + "inflation_adjusted_price_year", + "inflation_adjusted_price_month", + "sales_year", + "sales_quarter", + "sales_growth_year", + "sales_growth_quarter", + "real_sales_year", + "real_sales_quarter", + "real_sales_growth_year", + "real_sales_growth_quarter", + "price_to_sales_year", + "price_to_sales_quarter", + "price_to_book_value_year", + "price_to_book_value_quarter", + "book_value_year", + "book_value_quarter", +] + + +class SP500MultiplesQueryParams(QueryParams): + """SP500 Multiples Query.""" + + series_name: Union[SERIES_NAME, str] = Field( + description="The name of the series. 
Defaults to 'pe_month'.", + default="pe_month", + ) + start_date: Optional[dateType] = Field( + description=QUERY_DESCRIPTIONS.get("start_date", ""), default=None + ) + end_date: Optional[dateType] = Field( + description=QUERY_DESCRIPTIONS.get("end_date", ""), default=None + ) + + +class SP500MultiplesData(Data): + """SP500 Multiples Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + name: str = Field( + description="Name of the series.", + ) + value: Union[int, float] = Field( + description="Value of the series.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/spot.py b/openbb_platform/core/openbb_core/provider/standard_models/spot.py new file mode 100644 index 0000000000000000000000000000000000000000..4980c65e42619f59471f7581acbc691e3f00a6e3 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/spot.py @@ -0,0 +1,47 @@ +"""Spot Rate Standard Model.""" + +from datetime import ( + date as dateType, +) +from typing import Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class SpotRateQueryParams(QueryParams): + """Spot Rate Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + maturity: Union[float, str] = Field( + default=10.0, description="Maturities in years." + ) + category: str = Field( + default="spot_rate", + description="Rate category. 
Options: spot_rate, par_yield.", + ) + + @field_validator("category", mode="before", check_fields=False) + @classmethod + def to_lower(cls, v: Optional[str]) -> Optional[str]: + """Convert field to lowercase.""" + return v.lower() if v else v + + +class SpotRateData(Data): + """Spot Rate Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + rate: Optional[float] = Field(description="Spot Rate.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/survey_of_economic_conditions_chicago.py b/openbb_platform/core/openbb_core/provider/standard_models/survey_of_economic_conditions_chicago.py new file mode 100644 index 0000000000000000000000000000000000000000..fa5dbec9a42cdbf810f19a4ef5677b3d9efadc96 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/survey_of_economic_conditions_chicago.py @@ -0,0 +1,56 @@ +"""Survey Of Economic Conditions - Chicago - Standard Model.""" + +from datetime import ( + date as dateType, +) +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class SurveyOfEconomicConditionsChicagoQueryParams(QueryParams): + """Survey Of Economic Conditions - Chicago - Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + + +class SurveyOfEconomicConditionsChicagoData(Data): + """Survey Of Economic Conditions - Chicago - Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + activity_index: Optional[float] = Field(default=None, description="Activity Index.") + one_year_outlook: Optional[float] = Field( + default=None, description="One 
Year Outlook Index." + ) + manufacturing_activity: Optional[float] = Field( + default=None, description="Manufacturing Activity Index." + ) + non_manufacturing_activity: Optional[float] = Field( + default=None, description="Non-Manufacturing Activity Index." + ) + capital_expenditures_expectations: Optional[float] = Field( + default=None, description="Capital Expenditures Expectations Index." + ) + hiring_expectations: Optional[float] = Field( + default=None, description="Hiring Expectations Index." + ) + current_hiring: Optional[float] = Field( + default=None, description="Current Hiring Index." + ) + labor_costs: Optional[float] = Field(default=None, description="Labor Costs Index.") + non_labor_costs: Optional[float] = Field( + default=None, description="Non-Labor Costs Index." + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/symbol_map.py b/openbb_platform/core/openbb_core/provider/standard_models/symbol_map.py new file mode 100644 index 0000000000000000000000000000000000000000..fff5ecd6e54a76d9089526d1681006fa664cf7b5 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/symbol_map.py @@ -0,0 +1,16 @@ +"""Commitment of Traders Reports Search Standard Model.""" + +from typing import Optional + +from openbb_core.provider.abstract.query_params import QueryParams +from pydantic import Field + + +class SymbolMapQueryParams(QueryParams): + """Commitment of Traders Reports Search Query.""" + + query: str = Field(description="Search query.") + use_cache: Optional[bool] = Field( + default=True, + description="Whether or not to use cache. 
If True, cache will store for seven days.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/tbffr.py b/openbb_platform/core/openbb_core/provider/standard_models/tbffr.py new file mode 100644 index 0000000000000000000000000000000000000000..0c72d3347f8641e9f6ce0b949d4407a00dfa001f --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/tbffr.py @@ -0,0 +1,44 @@ +"""Selected Treasury Bill Standard Model.""" + +from datetime import ( + date as dateType, +) +from typing import Literal, Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class SelectedTreasuryBillQueryParams(QueryParams): + """Selected Treasury Bill Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + maturity: Optional[Literal["3m", "6m"]] = Field( + default="3m", + description="The maturity", + ) + + @field_validator("maturity", mode="before", check_fields=False) + @classmethod + def to_lower(cls, v: Optional[str]) -> Optional[str]: + """Convert field to lowercase.""" + return v.lower() if v else v + + +class SelectedTreasuryBillData(Data): + """Selected Treasury Bill Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + rate: Optional[float] = Field(description="SelectedTreasuryBill Rate.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/tips_yields.py b/openbb_platform/core/openbb_core/provider/standard_models/tips_yields.py new file mode 100644 index 0000000000000000000000000000000000000000..14db13dae7eaaf6cb2d311ba0eff1301da8459df --- /dev/null +++ 
b/openbb_platform/core/openbb_core/provider/standard_models/tips_yields.py @@ -0,0 +1,48 @@ +"""TIPS (Treasury Inflation-Protected Securities) Yields Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class TipsYieldsQueryParams(QueryParams): + """TIPS Yields Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + + +class TipsYieldsData(Data): + """TIPS Yields Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + symbol: Optional[str] = Field( + default=None, + description=DATA_DESCRIPTIONS.get("symbol", ""), + ) + due: Optional[dateType] = Field( + default=None, + description="The due date (maturation date) of the security.", + ) + name: Optional[str] = Field( + default=None, + description="The name of the security.", + ) + value: Optional[float] = Field( + default=None, + description="The yield value.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/tmc.py b/openbb_platform/core/openbb_core/provider/standard_models/tmc.py new file mode 100644 index 0000000000000000000000000000000000000000..b78b80e87d233caabdc11de4cff3c91121b6f73c --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/tmc.py @@ -0,0 +1,44 @@ +"""Treasury Constant Maturity Model.""" + +from datetime import ( + date as dateType, +) +from typing import Literal, Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params 
import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, field_validator + + +class TreasuryConstantMaturityQueryParams(QueryParams): + """Treasury Constant Maturity Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + maturity: Optional[Literal["3m", "2y"]] = Field( + default="3m", + description="The maturity", + ) + + @field_validator("maturity", mode="before", check_fields=False) + @classmethod + def to_lower(cls, v: Optional[str]) -> Optional[str]: + """Convert field to lowercase.""" + return v.lower() if v else v + + +class TreasuryConstantMaturityData(Data): + """Treasury Constant Maturity Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + rate: Optional[float] = Field(description="TreasuryConstantMaturity Rate.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/top_retail.py b/openbb_platform/core/openbb_core/provider/standard_models/top_retail.py new file mode 100644 index 0000000000000000000000000000000000000000..580ecf5d2773964c67907a4972aa5e1cbdecca40 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/top_retail.py @@ -0,0 +1,28 @@ +"""Top Retail Standard Model.""" + +from datetime import date as DateType + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class TopRetailQueryParams(QueryParams): + """Top Retail Search Query.""" + + limit: int = Field(description=QUERY_DESCRIPTIONS.get("limit", ""), default=5) + + +class TopRetailData(Data): + """Top Retail Search Data.""" + + date: DateType 
= Field(description=DATA_DESCRIPTIONS.get("date", "")) + symbol: str = Field(description=DATA_DESCRIPTIONS.get("symbol", "")) + activity: float = Field(description="Activity of the symbol.") + sentiment: float = Field( + description="Sentiment of the symbol. 1 is bullish, -1 is bearish." + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/trailing_dividend_yield.py b/openbb_platform/core/openbb_core/provider/standard_models/trailing_dividend_yield.py new file mode 100644 index 0000000000000000000000000000000000000000..bc23c63dfb79d7f2d91304d65346680ffc195302 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/trailing_dividend_yield.py @@ -0,0 +1,30 @@ +"""Trailing Dividend Yield Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class TrailingDivYieldQueryParams(QueryParams): + """Trailing Dividend Yield Query.""" + + symbol: str = Field(description=QUERY_DESCRIPTIONS.get("symbol", "")) + limit: Optional[int] = Field( + default=252, + description=f"{QUERY_DESCRIPTIONS.get('limit', '')}" + " Default is 252, the number of trading days in a year.", + ) + + +class TrailingDivYieldData(Data): + """Trailing Dividend Yield Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + trailing_dividend_yield: float = Field(description="Trailing dividend yield.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/treasury_auctions.py b/openbb_platform/core/openbb_core/provider/standard_models/treasury_auctions.py new file mode 100644 index 0000000000000000000000000000000000000000..51cf15b51b615a932b29d999a78525c9ef3a0d35 --- /dev/null +++ 
b/openbb_platform/core/openbb_core/provider/standard_models/treasury_auctions.py @@ -0,0 +1,599 @@ +"""US Treasury Auctions Standard Model.""" + +from datetime import ( + date as dateType, + datetime, + timedelta, +) +from typing import Literal, Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import QUERY_DESCRIPTIONS +from pydantic import Field, model_validator + + +class USTreasuryAuctionsQueryParams(QueryParams): + """US Treasury Auctions Query.""" + + __json_schema_extra__ = { + "security_type": { + "choices": ["bill", "note", "bond", "cmb", "tips", "frn"], + } + } + + security_type: Optional[Literal["bill", "note", "bond", "cmb", "tips", "frn"]] = ( + Field( + default=None, + description="Used to only return securities of a particular type.", + ) + ) + cusip: Optional[str] = Field( + default=None, + description="Filter securities by CUSIP.", + ) + page_size: Optional[int] = Field( + default=None, + description="Maximum number of results to return; you must also include pagenum when using pagesize.", + ) + page_num: Optional[int] = Field( + default=None, + description="The first page number to display results for; used in combination with page size.", + ) + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", "") + + " The default is 90 days ago.", + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", "") + " The default is today.", + ) + + @model_validator(mode="before") + @classmethod + def validate_dates(cls, values) -> dict: + """Validate the query parameters.""" + if not isinstance(values, dict): + return values + + if values.get("start_date") is None: + values["start_date"] = (datetime.now() - timedelta(days=90)).strftime( + "%Y-%m-%d" + ) + if values.get("end_date") is None: + values["end_date"] = 
datetime.now().strftime("%Y-%m-%d") + return values + + +class USTreasuryAuctionsData(Data): + """US Treasury Auctions Data.""" + + cusip: str = Field(description="CUSIP of the Security.") + issue_date: dateType = Field( + description="The issue date of the security.", + ) + security_type: Literal["Bill", "Note", "Bond", "CMB", "TIPS", "FRN"] = Field( + description="The type of security.", + ) + security_term: str = Field( + description="The term of the security.", + ) + maturity_date: dateType = Field( + description="The maturity date of the security.", + ) + interest_rate: Optional[float] = Field( + default=None, + description="The interest rate of the security.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + cpi_on_issue_date: Optional[float] = Field( + default=None, + description="Reference CPI rate on the issue date of the security.", + ) + cpi_on_dated_date: Optional[float] = Field( + default=None, + description="Reference CPI rate on the dated date of the security.", + ) + announcement_date: Optional[dateType] = Field( + default=None, + description="The announcement date of the security.", + ) + auction_date: Optional[dateType] = Field( + default=None, + description="The auction date of the security.", + ) + auction_date_year: Optional[int] = Field( + default=None, + description="The auction date year of the security.", + ) + dated_date: Optional[dateType] = Field( + default=None, + description="The dated date of the security.", + ) + first_payment_date: Optional[dateType] = Field( + default=None, + description="The first payment date of the security.", + ) + accrued_interest_per_100: Optional[float] = Field( + default=None, + description="Accrued interest per $100.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + accrued_interest_per_1000: Optional[float] = Field( + default=None, + description="Accrued interest per $1000.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + 
adjusted_accrued_interest_per_100: Optional[float] = Field( + default=None, + description="Adjusted accrued interest per $100.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + adjusted_accrued_interest_per_1000: Optional[float] = Field( + default=None, + description="Adjusted accrued interest per $1000.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + adjusted_price: Optional[float] = Field( + default=None, + description="Adjusted price.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + allocation_percentage: Optional[float] = Field( + default=None, + description="Allocation percentage, as normalized percentage points.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + allocation_percentage_decimals: Optional[float] = Field( + default=None, + description="The number of decimals in the Allocation percentage.", + ) + announced_cusip: Optional[str] = Field( + default=None, + description="The announced CUSIP of the security.", + ) + auction_format: Optional[str] = Field( + default=None, + description="The auction format of the security.", + ) + avg_median_discount_rate: Optional[float] = Field( + default=None, + description="The average median discount rate of the security.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + avg_median_investment_rate: Optional[float] = Field( + default=None, + description="The average median investment rate of the security.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + avg_median_price: Optional[float] = Field( + default=None, + description="The average median price paid for the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + avg_median_discount_margin: Optional[float] = Field( + default=None, + description="The average median discount margin of the security.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + 
) + avg_median_yield: Optional[float] = Field( + default=None, + description="The average median yield of the security.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + back_dated: Optional[Literal["Yes", "No"]] = Field( + default=None, + description="Whether the security is back dated.", + ) + back_dated_date: Optional[dateType] = Field( + default=None, + description="The back dated date of the security.", + ) + bid_to_cover_ratio: Optional[float] = Field( + default=None, + description="The bid to cover ratio of the security.", + ) + call_date: Optional[dateType] = Field( + default=None, + description="The call date of the security.", + ) + callable: Optional[Literal["Yes", "No"]] = Field( + default=None, + description="Whether the security is callable.", + ) + called_date: Optional[dateType] = Field( + default=None, + description="The called date of the security.", + ) + cash_management_bill: Optional[Literal["Yes", "No"]] = Field( + default=None, + description="Whether the security is a cash management bill.", + ) + closing_time_competitive: Optional[str] = Field( + default=None, + description="The closing time for competitive bids on the security.", + ) + closing_time_non_competitive: Optional[str] = Field( + default=None, + description="The closing time for non-competitive bids on the security.", + ) + competitive_accepted: Optional[int] = Field( + default=None, + description="The accepted value for competitive bids on the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + competitive_accepted_decimals: Optional[int] = Field( + default=None, + description="The number of decimals in the Competitive Accepted.", + ) + competitive_tendered: Optional[int] = Field( + default=None, + description="The tendered value for competitive bids on the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + competitive_tenders_accepted: Optional[Literal["Yes", "No"]] = Field( + default=None, + 
description="Whether competitive tenders are accepted on the security.", + ) + corp_us_cusip: Optional[str] = Field( + default=None, + description="The CUSIP of the security.", + ) + cpi_base_reference_period: Optional[str] = Field( + default=None, + description="The CPI base reference period of the security.", + ) + currently_outstanding: Optional[int] = Field( + default=None, + description="The currently outstanding value on the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + direct_bidder_accepted: Optional[int] = Field( + default=None, + description="The accepted value from direct bidders on the security.", + ) + direct_bidder_tendered: Optional[int] = Field( + default=None, + description="The tendered value from direct bidders on the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + est_amount_of_publicly_held_maturing_security: Optional[int] = Field( + default=None, + description="The estimated amount of publicly held maturing securities on the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + fima_included: Optional[Literal["Yes", "No"]] = Field( + default=None, + description="Whether the security is included in the FIMA (Foreign and International Money Authorities).", + ) + fima_non_competitive_accepted: Optional[int] = Field( + default=None, + description="The non-competitive accepted value on the security from FIMAs.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + fima_non_competitive_tendered: Optional[int] = Field( + default=None, + description="The non-competitive tendered value on the security from FIMAs.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + first_interest_period: Optional[str] = Field( + default=None, + description="The first interest period of the security.", + ) + first_interest_payment_date: Optional[dateType] = Field( + default=None, + description="The first interest payment date of the security.", + ) + floating_rate: 
Optional[Literal["Yes", "No"]] = Field( + default=None, + description="Whether the security is a floating rate.", + ) + frn_index_determination_date: Optional[dateType] = Field( + default=None, + description="The FRN index determination date of the security.", + ) + frn_index_determination_rate: Optional[float] = Field( + default=None, + description="The FRN index determination rate of the security.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + high_discount_rate: Optional[float] = Field( + default=None, + description="The high discount rate of the security.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + high_investment_rate: Optional[float] = Field( + default=None, + description="The high investment rate of the security.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + high_price: Optional[float] = Field( + default=None, + description="The high price of the security at auction.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + high_discount_margin: Optional[float] = Field( + default=None, + description="The high discount margin of the security.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + high_yield: Optional[float] = Field( + default=None, + description="The high yield of the security at auction.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + index_ratio_on_issue_date: Optional[float] = Field( + default=None, + description="The index ratio on the issue date of the security.", + ) + indirect_bidder_accepted: Optional[int] = Field( + default=None, + description="The accepted value from indirect bidders on the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + indirect_bidder_tendered: Optional[int] = Field( + default=None, + description="The tendered value from indirect bidders on the security.", + 
json_schema_extra={"x-unit_measurement": "currency"}, + ) + interest_payment_frequency: Optional[str] = Field( + default=None, + description="The interest payment frequency of the security.", + ) + low_discount_rate: Optional[float] = Field( + default=None, + description="The low discount rate of the security.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + low_investment_rate: Optional[float] = Field( + default=None, + description="The low investment rate of the security.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + low_price: Optional[float] = Field( + default=None, + description="The low price of the security at auction.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + low_discount_margin: Optional[float] = Field( + default=None, + description="The low discount margin of the security.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + low_yield: Optional[float] = Field( + default=None, + description="The low yield of the security at auction.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + maturing_date: Optional[dateType] = Field( + default=None, + description="The maturing date of the security.", + ) + max_competitive_award: Optional[int] = Field( + default=None, + description="The maximum competitive award at auction.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + max_non_competitive_award: Optional[int] = Field( + default=None, + description="The maximum non-competitive award at auction.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + max_single_bid: Optional[int] = Field( + default=None, + description="The maximum single bid at auction.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + min_bid_amount: Optional[int] = Field( + default=None, + description="The minimum bid amount at auction.", + 
json_schema_extra={"x-unit_measurement": "currency"}, + ) + min_strip_amount: Optional[int] = Field( + default=None, + description="The minimum strip amount at auction.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + min_to_issue: Optional[int] = Field( + default=None, + description="The minimum to issue at auction.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + multiples_to_bid: Optional[int] = Field( + default=None, + description="The multiples to bid at auction.", + ) + multiples_to_issue: Optional[int] = Field( + default=None, + description="The multiples to issue at auction.", + ) + nlp_exclusion_amount: Optional[int] = Field( + default=None, + description="The NLP exclusion amount at auction.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + nlp_reporting_threshold: Optional[int] = Field( + default=None, + description="The NLP reporting threshold at auction.", + ) + non_competitive_accepted: Optional[int] = Field( + default=None, + description="The accepted value from non-competitive bidders on the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + non_competitive_tenders_accepted: Optional[Literal["Yes", "No"]] = Field( + default=None, + description="Whether or not the auction accepted non-competitive tenders.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + offering_amount: Optional[int] = Field( + default=None, + description="The offering amount at auction.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + original_cusip: Optional[str] = Field( + default=None, + description="The original CUSIP of the security.", + ) + original_dated_date: Optional[dateType] = Field( + default=None, + description="The original dated date of the security.", + ) + original_issue_date: Optional[dateType] = Field( + default=None, + description="The original issue date of the security.", + ) + original_security_term: Optional[str] = Field( + default=None, + description="The 
original term of the security.", + ) + pdf_announcement: Optional[str] = Field( + default=None, + description="The PDF filename for the announcement of the security.", + ) + pdf_competitive_results: Optional[str] = Field( + default=None, + description="The PDF filename for the competitive results of the security.", + ) + pdf_non_competitive_results: Optional[str] = Field( + default=None, + description="The PDF filename for the non-competitive results of the security.", + ) + pdf_special_announcement: Optional[str] = Field( + default=None, + description="The PDF filename for the special announcements.", + ) + price_per_100: Optional[float] = Field( + default=None, + description="The price per 100 of the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + primary_dealer_accepted: Optional[int] = Field( + default=None, + description="The primary dealer accepted value on the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + primary_dealer_tendered: Optional[int] = Field( + default=None, + description="The primary dealer tendered value on the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + reopening: Optional[Literal["Yes", "No"]] = Field( + default=None, + description="Whether or not the auction was reopened.", + ) + security_term_day_month: Optional[str] = Field( + default=None, + description="The security term in days or months.", + ) + security_term_week_year: Optional[str] = Field( + default=None, + description="The security term in weeks or years.", + ) + series: Optional[str] = Field( + default=None, + description="The series name of the security.", + ) + soma_accepted: Optional[int] = Field( + default=None, + description="The SOMA accepted value on the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + soma_holdings: Optional[int] = Field( + default=None, + description="The SOMA holdings on the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + 
soma_included: Optional[Literal["Yes", "No"]] = Field( + default=None, + description="Whether or not the SOMA (System Open Market Account) was included on the security.", + ) + soma_tendered: Optional[int] = Field( + default=None, + description="The SOMA tendered value on the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + spread: Optional[float] = Field( + default=None, + description="The spread on the security.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + standard_payment_per_1000: Optional[float] = Field( + default=None, + description="The standard payment per 1000 of the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + strippable: Optional[Literal["Yes", "No"]] = Field( + default=None, + description="Whether or not the security is strippable.", + ) + term: Optional[str] = Field( + default=None, + description="The term of the security.", + ) + tiin_conversion_factor_per_1000: Optional[float] = Field( + default=None, + description="The TIIN conversion factor per 1000 of the security.", + ) + tips: Optional[Literal["Yes", "No"]] = Field( + default=None, + description="Whether or not the security is TIPS.", + ) + total_accepted: Optional[int] = Field( + default=None, + description="The total accepted value at auction.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + total_tendered: Optional[int] = Field( + default=None, + description="The total tendered value at auction.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + treasury_retail_accepted: Optional[int] = Field( + default=None, + description="The accepted value on the security from retail.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + treasury_retail_tenders_accepted: Optional[Literal["Yes", "No"]] = Field( + default=None, + description="Whether or not the tender offers from retail are accepted", + ) + type: Optional[str] = Field( + default=None, + description="The 
type of issuance. This might be different than the security type.", + ) + unadjusted_accrued_interest_per_1000: Optional[float] = Field( + default=None, + description="The unadjusted accrued interest per 1000 of the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + unadjusted_price: Optional[float] = Field( + default=None, + description="The unadjusted price of the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + updated_timestamp: Optional[datetime] = Field( + default=None, + description="The updated timestamp of the security.", + ) + xml_announcement: Optional[str] = Field( + default=None, + description="The XML filename for the announcement of the security.", + ) + xml_competitive_results: Optional[str] = Field( + default=None, + description="The XML filename for the competitive results of the security.", + ) + xml_special_announcement: Optional[str] = Field( + default=None, + description="The XML filename for special announcements.", + ) + tint_cusip1: Optional[str] = Field( + default=None, + description="Tint CUSIP 1.", + ) + tint_cusip2: Optional[str] = Field( + default=None, + description="Tint CUSIP 2.", + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/treasury_prices.py b/openbb_platform/core/openbb_core/provider/standard_models/treasury_prices.py new file mode 100644 index 0000000000000000000000000000000000000000..332ab155ea133de2faad90c476d4adac87ba5eb5 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/treasury_prices.py @@ -0,0 +1,101 @@ +"""Treasury Prices Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import QUERY_DESCRIPTIONS +from pydantic import Field + + +class TreasuryPricesQueryParams(QueryParams): + """Treasury Prices Query.""" + + 
date: Optional[dateType] = Field( + description=QUERY_DESCRIPTIONS.get("date", "") + + " Defaults to the last business day.", + default=None, + ) + + +class TreasuryPricesData(Data): + """Treasury Prices Data.""" + + issuer_name: Optional[str] = Field( + default=None, + description="Name of the issuing entity.", + ) + cusip: Optional[str] = Field( + default=None, + description="CUSIP of the security.", + ) + isin: Optional[str] = Field( + default=None, + description="ISIN of the security.", + ) + security_type: Optional[str] = Field( + default=None, + description="The type of Treasury security - i.e., Bill, Note, Bond, TIPS, FRN.", + ) + issue_date: Optional[dateType] = Field( + default=None, + description="The original issue date of the security.", + ) + maturity_date: Optional[dateType] = Field( + default=None, + description="The maturity date of the security.", + ) + call_date: Optional[dateType] = Field( + description="The call date of the security.", default=None + ) + bid: Optional[float] = Field( + default=None, + description="The bid price of the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + offer: Optional[float] = Field( + default=None, + description="The offer price of the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + eod_price: Optional[float] = Field( + default=None, + description="The end-of-day price of the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + last_traded_date: Optional[dateType] = Field( + description="The last trade date of the security.", default=None + ) + total_trades: Optional[int] = Field( + default=None, + description="Total number of trades on the last traded date.", + ) + last_price: Optional[float] = Field( + default=None, + description="The last price of the security.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + highest_price: Optional[float] = Field( + default=None, + description="The highest price for the bond on the last 
traded date.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + lowest_price: Optional[float] = Field( + default=None, + description="The lowest price for the bond on the last traded date.", + json_schema_extra={"x-unit_measurement": "currency"}, + ) + rate: Optional[float] = Field( + description="The annualized interest rate or coupon of the security.", + default=None, + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + ytm: Optional[float] = Field( + default=None, + description="Yield to maturity (YTM) is the rate of return anticipated on a bond" + + " if it is held until the maturity date. It takes into account" + + " the current market price, par value, coupon rate and time to maturity. It is assumed that all" + + " coupons are reinvested at the same rate.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/treasury_rates.py b/openbb_platform/core/openbb_core/provider/standard_models/treasury_rates.py new file mode 100644 index 0000000000000000000000000000000000000000..a29f81da063aa60acb6db5b57fbcb500b12bb870 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/treasury_rates.py @@ -0,0 +1,96 @@ +"""Treasury Rates Standard Model.""" + +from datetime import date as dateType +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class TreasuryRatesQueryParams(QueryParams): + """Treasury Rates Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + + +class 
TreasuryRatesData(Data): + """Treasury Rates Data. All fields are expressed as a normalized percent - 1% = 0.01.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + week_4: Optional[float] = Field( + default=None, + description="4 week Treasury bills rate (secondary market).", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + month_1: Optional[float] = Field( + description="1 month Treasury rate.", + default=None, + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + month_2: Optional[float] = Field( + description="2 month Treasury rate.", + default=None, + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + month_3: Optional[float] = Field( + description="3 month Treasury rate.", + default=None, + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + month_6: Optional[float] = Field( + description="6 month Treasury rate.", + default=None, + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + year_1: Optional[float] = Field( + description="1 year Treasury rate.", + default=None, + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + year_2: Optional[float] = Field( + description="2 year Treasury rate.", + default=None, + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + year_3: Optional[float] = Field( + description="3 year Treasury rate.", + default=None, + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + year_5: Optional[float] = Field( + description="5 year Treasury rate.", + default=None, + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + year_7: Optional[float] = Field( + description="7 year Treasury rate.", + default=None, + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + 
year_10: Optional[float] = Field( + description="10 year Treasury rate.", + default=None, + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + year_20: Optional[float] = Field( + description="20 year Treasury rate.", + default=None, + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) + year_30: Optional[float] = Field( + description="30 year Treasury rate.", + default=None, + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/unemployment.py b/openbb_platform/core/openbb_core/provider/standard_models/unemployment.py new file mode 100644 index 0000000000000000000000000000000000000000..62d2766da20807ba2d51f198f48d815cc56853d9 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/unemployment.py @@ -0,0 +1,49 @@ +"""Unemployment Standard Model.""" + +from datetime import date as dateType +from typing import Literal, Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class UnemploymentQueryParams(QueryParams): + """Unemployment Query.""" + + country: str = Field( + description=QUERY_DESCRIPTIONS.get("country", ""), + default="united_states", + ) + frequency: Literal["monthly", "quarter", "annual"] = Field( + description=QUERY_DESCRIPTIONS.get("frequency", ""), + default="monthly", + json_schema_extra={"choices": ["monthly", "quarter", "annual"]}, + ) + start_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("start_date") + ) + end_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date") + ) + + +class UnemploymentData(Data): + """Unemployment Data.""" + + date: Optional[dateType] = Field( 
+ default=None, description=DATA_DESCRIPTIONS.get("date") + ) + country: Optional[str] = Field( + default=None, + description="Country for which unemployment rate is given", + ) + value: Optional[float] = Field( + default=None, + description="Unemployment rate, as a normalized percent.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/university_of_michigan.py b/openbb_platform/core/openbb_core/provider/standard_models/university_of_michigan.py new file mode 100644 index 0000000000000000000000000000000000000000..2327d5d52d03dd881fff6db818801bb6d16ba44a --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/university_of_michigan.py @@ -0,0 +1,43 @@ +"""University Of Michigan Survey Standard Model.""" + +from datetime import ( + date as dateType, +) +from typing import Optional + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field + + +class UofMichiganQueryParams(QueryParams): + """University Of Michigan Survey Query.""" + + start_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("start_date", ""), + ) + end_date: Optional[dateType] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("end_date", ""), + ) + + +class UofMichiganData(Data): + """University Of Michigan Survey Data.""" + + date: dateType = Field(description=DATA_DESCRIPTIONS.get("date", "")) + consumer_sentiment: Optional[float] = Field( + default=None, + description="Index of the results of the University of Michigan's monthly Survey of Consumers," + + " which is used to estimate future spending and saving. 
(1966:Q1=100).", + ) + inflation_expectation: Optional[float] = Field( + default=None, + description="Median expected price change next 12 months, Surveys of Consumers.", + json_schema_extra={"x-unit_measurement": "percent", "x-frontend_multiply": 100}, + ) diff --git a/openbb_platform/core/openbb_core/provider/standard_models/world_news.py b/openbb_platform/core/openbb_core/provider/standard_models/world_news.py new file mode 100644 index 0000000000000000000000000000000000000000..72bc3fe91df317f5364b0d084b6dbdd09da6fcd5 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/world_news.py @@ -0,0 +1,64 @@ +"""World News Standard Model.""" + +from datetime import ( + date as dateType, + datetime, +) +from typing import Dict, List, Optional + +from dateutil.relativedelta import relativedelta +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, NonNegativeInt, field_validator + + +class WorldNewsQueryParams(QueryParams): + """World News Query.""" + + limit: NonNegativeInt = Field( + default=2500, + description=QUERY_DESCRIPTIONS.get("limit", "") + + " The number of articles to return.", + ) + start_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("start_date", "") + ) + end_date: Optional[dateType] = Field( + default=None, description=QUERY_DESCRIPTIONS.get("end_date", "") + ) + + @field_validator("start_date", mode="before") + @classmethod + def start_date_validate(cls, v) -> dateType: # pylint: disable=E0213 + """Populate start date if empty.""" + if not v: + now = datetime.now().date() + v = now - relativedelta(weeks=2) + return v + + @field_validator("end_date", mode="before") + @classmethod + def end_date_validate(cls, v) -> dateType: # pylint: disable=E0213 + """Populate end date if empty.""" + if not v: + v = 
datetime.now().date() + return v + + +class WorldNewsData(Data): + """World News Data.""" + + date: datetime = Field( + description=DATA_DESCRIPTIONS.get("date", "") + + " The published date of the article." + ) + title: str = Field(description="Title of the article.") + images: Optional[List[Dict[str, str]]] = Field( + default=None, description="Images associated with the article." + ) + text: Optional[str] = Field(default=None, description="Text/body of the article.") + url: Optional[str] = Field(default=None, description="URL to the article.") diff --git a/openbb_platform/core/openbb_core/provider/standard_models/yield_curve.py b/openbb_platform/core/openbb_core/provider/standard_models/yield_curve.py new file mode 100644 index 0000000000000000000000000000000000000000..382d6c2ce07668ebd603641d97e03d0a35c638a1 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/standard_models/yield_curve.py @@ -0,0 +1,72 @@ +"""Yield Curve Standard Model.""" + +from datetime import date as dateType +from typing import Optional, Union + +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.abstract.query_params import QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic import Field, computed_field, field_validator + + +class YieldCurveQueryParams(QueryParams): + """Yield Curve Query.""" + + date: Optional[Union[dateType, str]] = Field( + default=None, + description=QUERY_DESCRIPTIONS.get("date", "") + + " By default is the current data.", + ) + + @field_validator("date", mode="before", check_fields=False) + @classmethod + def _validate_date(cls, v): + """Validate the date.""" + # pylint: disable=import-outside-toplevel + from pandas import to_datetime + + if v is None: + return None + if isinstance(v, dateType): + return v.strftime("%Y-%m-%d") + new_dates: list = [] + dates: list = [] + if isinstance(v, str): + dates = v.split(",") + elif isinstance(v, list): + dates = 
v + for date in dates: + new_dates.append(to_datetime(date).date().strftime("%Y-%m-%d")) + + return ",".join(new_dates) if new_dates else None + + +class YieldCurveData(Data): + """Yield Curve Data.""" + + date: Optional[dateType] = Field( + default=None, + description=DATA_DESCRIPTIONS.get("date", ""), + ) + maturity: str = Field(description="Maturity length of the security.") + + @computed_field( # type: ignore + description="Maturity length, in years, as a decimal.", + return_type=Optional[float], + ) + @property + def maturity_years(self) -> Optional[float]: + """Get the maturity in years as a decimal.""" + if "_" not in self.maturity: # pylint: disable=E1135 + return None + + parts = self.maturity.split("_") # pylint: disable=E1101 + months = sum( + int(parts[i + 1]) * (12 if parts[i] == "year" else 1) + for i in range(0, len(parts), 2) + ) + + return months / 12 diff --git a/openbb_platform/core/openbb_core/provider/utils/__init__.py b/openbb_platform/core/openbb_core/provider/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..d824a05e8d0849d9f42838ccf420803fa66e71c2 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/utils/__init__.py @@ -0,0 +1 @@ +"""OpenBB Provider Utils.""" diff --git a/openbb_platform/core/openbb_core/provider/utils/client.py b/openbb_platform/core/openbb_core/provider/utils/client.py new file mode 100644 index 0000000000000000000000000000000000000000..67728d530bfc34a042602b423b95a9b58e5f1c00 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/utils/client.py @@ -0,0 +1,141 @@ +"""Aiohttp client.""" + +# pylint: disable=protected-access,invalid-overridden-method +import asyncio +import random +import warnings +from typing import Any, Dict, Type, Union + +import aiohttp +from multidict import CIMultiDict, CIMultiDictProxy, MultiDict + +FILTER_QUERY_REGEX = r".*key.*|.*token.*|.*auth.*|(c$)" + + +def obfuscate(params: Union[CIMultiDict[str], MultiDict[str]]) -> Dict[str, Any]: + 
"""Obfuscate sensitive information.""" + # pylint: disable=import-outside-toplevel + import re + + return { + param: "********" if re.match(FILTER_QUERY_REGEX, param, re.IGNORECASE) else val + for param, val in params.items() + } + + +def get_user_agent() -> str: + """Get a not very random user agent.""" + user_agent_strings = [ + "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.10; rv:86.1) Gecko/20100101 Firefox/86.1", + "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:86.1) Gecko/20100101 Firefox/86.1", + "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:82.1) Gecko/20100101 Firefox/82.1", + "Mozilla/5.0 (Macintosh; Intel Mac OS X 10.13; rv:86.0) Gecko/20100101 Firefox/86.0", + "Mozilla/5.0 (Windows NT 10.0; WOW64; rv:86.0) Gecko/20100101 Firefox/86.0", + "Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10.10; rv:83.0) Gecko/20100101 Firefox/83.0", + "Mozilla/5.0 (Windows NT 6.1; WOW64; rv:84.0) Gecko/20100101 Firefox/84.0", + ] + + return random.choice(user_agent_strings) # nosec # noqa: S311 + + +class ClientResponse(aiohttp.ClientResponse): + """Client response class.""" + + def __init__(self, *args, **kwargs): + """Initialize the response.""" + kwargs["request_info"] = self.obfuscate_request_info(kwargs["request_info"]) + super().__init__(*args, **kwargs) + + @classmethod + def obfuscate_request_info( + cls, request_info: aiohttp.RequestInfo + ) -> aiohttp.RequestInfo: + """Remove sensitive information from request info.""" + query = obfuscate(request_info.url.query.copy()) + headers = CIMultiDictProxy(CIMultiDict(obfuscate(request_info.headers.copy()))) + url = request_info.url.with_query(query) + + return aiohttp.RequestInfo(url, request_info.method, headers, url) + + async def json(self, **kwargs) -> Union[dict, list]: + """Return the json response.""" + return await super().json(**kwargs) + + +class ClientSession(aiohttp.ClientSession): + """Client session.""" + + _response_class: Type[ClientResponse] + _session: "ClientSession" + + def __init__(self, *args, **kwargs): + 
"""Initialize the session.""" + kwargs["connector"] = kwargs.get( + "connector", aiohttp.TCPConnector(ttl_dns_cache=300) + ) + kwargs["response_class"] = kwargs.get("response_class", ClientResponse) + kwargs["auto_decompress"] = kwargs.get("auto_decompress", False) + + super().__init__(*args, **kwargs) + + # pylint: disable=unused-argument + def __del__(self, _warnings: Any = warnings) -> None: + """Close the session.""" + if not self.closed: + asyncio.create_task(self.close()) + + async def get(self, url: str, **kwargs) -> ClientResponse: # type: ignore + """Send GET request.""" + return await self.request("GET", url, **kwargs) + + async def post(self, url: str, **kwargs) -> ClientResponse: # type: ignore + """Send POST request.""" + return await self.request("POST", url, **kwargs) + + async def get_json(self, url: str, **kwargs) -> Union[dict, list]: + """Send GET request and return json.""" + response = await self.request("GET", url, **kwargs) + return await response.json() + + async def get_one(self, url: str, **kwargs) -> Dict[str, Any]: + """Send GET request and return first item in json if list.""" + response = await self.request("GET", url, **kwargs) + data = await response.json() + + if isinstance(data, list): + return data[0] + + return data + + async def request( # type: ignore + self, *args, raise_for_status: bool = False, **kwargs + ) -> ClientResponse: + """Send request.""" + # pylint: disable=import-outside-toplevel + import zlib + + kwargs["headers"] = kwargs.get( + "headers", + # Default headers, makes sure we accept gzip + { + "Accept": "application/json", + "Accept-Encoding": "gzip, deflate", + "Connection": "keep-alive", + }, + ) + + if kwargs["headers"].get("User-Agent", None) is None: + kwargs["headers"]["User-Agent"] = get_user_agent() + + response = await super().request(*args, **kwargs) + + if raise_for_status: + response.raise_for_status() + + encoding = response.headers.get("Content-Encoding", "") + if encoding in ("gzip", "deflate") and 
not self.auto_decompress: + response_body = await response.read() + wbits = 16 + zlib.MAX_WBITS if encoding == "gzip" else -zlib.MAX_WBITS + response._body = zlib.decompress(response_body, wbits) + + return response # type: ignore diff --git a/openbb_platform/core/openbb_core/provider/utils/descriptions.py b/openbb_platform/core/openbb_core/provider/utils/descriptions.py new file mode 100644 index 0000000000000000000000000000000000000000..7f860526062e349bcf148e7a9cf0e7e64b098e04 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/utils/descriptions.py @@ -0,0 +1,29 @@ +"""Common descriptions for model fields.""" + +QUERY_DESCRIPTIONS = { + "symbol": "Symbol to get data for.", + "start_date": "Start date of the data, in YYYY-MM-DD format.", + "end_date": "End date of the data, in YYYY-MM-DD format.", + "interval": "Time interval of the data to return.", + "period": "Time period of the data to return.", + "date": "A specific date to get data for.", + "limit": "The number of data entries to return.", + "country": "The country to get data.", + "countries": "The country or countries to get data.", + "units": "The unit of measurement for the data.", + "frequency": "The frequency of the data.", +} + +DATA_DESCRIPTIONS = { + "symbol": "Symbol representing the entity requested in the data.", + "cik": "Central Index Key (CIK) for the requested entity.", + "date": "The date of the data.", + "open": "The open price.", + "high": "The high price.", + "low": "The low price.", + "close": "The close price.", + "volume": "The trading volume.", + "adj_close": "The adjusted close price.", + "vwap": "Volume Weighted Average Price over the period.", + "prev_close": "The previous close price.", +} diff --git a/openbb_platform/core/openbb_core/provider/utils/errors.py b/openbb_platform/core/openbb_core/provider/utils/errors.py new file mode 100644 index 0000000000000000000000000000000000000000..c4bb8163deefd9ffec7e308b609c90ae7a9cb727 --- /dev/null +++ 
b/openbb_platform/core/openbb_core/provider/utils/errors.py @@ -0,0 +1,39 @@ +"""Custom exceptions for the provider.""" + +from typing import Union + +from openbb_core.app.model.abstract.error import OpenBBError + + +class EmptyDataError(OpenBBError): + """Exception raised for empty data.""" + + def __init__( + self, message: str = "No results found. Try adjusting the query parameters." + ): + """Initialize the exception.""" + self.message = message + super().__init__(self.message) + + +class UnauthorizedError(OpenBBError): + """Exception raised for an unauthorized provider request response.""" + + def __init__( + self, + message: Union[str, tuple[str]] = ( + "Unauthorized API request." + " Please check your credentials and subscription access.", + ), + provider_name: str = "", + ): + """Initialize the exception.""" + if provider_name and provider_name != "": + msg = message + if isinstance(msg, tuple): + msg = msg[0].replace("", provider_name) + elif isinstance(msg, str): + msg = msg.replace("", provider_name) + message = msg + self.message = message + super().__init__(str(self.message)) diff --git a/openbb_platform/core/openbb_core/provider/utils/helpers.py b/openbb_platform/core/openbb_core/provider/utils/helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..86b16020a8f77e89aee890bc50052640aa13c884 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/utils/helpers.py @@ -0,0 +1,624 @@ +"""Provider helpers.""" + +import asyncio +import os +from datetime import date, datetime, timedelta, timezone +from difflib import SequenceMatcher +from functools import partial +from inspect import iscoroutinefunction +from typing import ( + TYPE_CHECKING, + Awaitable, + Callable, + List, + Literal, + Optional, + TypeVar, + Union, + cast, +) + +from anyio.from_thread import start_blocking_portal +from openbb_core.provider.abstract.data import Data +from openbb_core.provider.utils.client import ( + ClientResponse, + ClientSession, + 
get_user_agent, +) +from typing_extensions import ParamSpec + +if TYPE_CHECKING: + from requests import Response, Session # pylint: disable=import-outside-toplevel + +T = TypeVar("T") +P = ParamSpec("P") +D = TypeVar("D", bound="Data") + + +def check_item(item: str, allowed: List[str], threshold: float = 0.75) -> None: + """Check if an item is in a list of allowed items and raise an error if not. + + Parameters + ---------- + item : str + The item to check. + allowed : List[str] + The list of allowed items. + threshold : float, optional + The similarity threshold for the error message, by default 0.75 + + Raises + ------ + ValueError + If the item is not in the allowed list. + """ + if item not in allowed: + similarities = map( + lambda c: (c, SequenceMatcher(None, item, c).ratio()), allowed + ) + similar, score = max(similarities, key=lambda x: x[1]) + if score > threshold: + raise ValueError(f"'{item}' is not available. Did you mean '{similar}'?") + raise ValueError(f"'{item}' is not available.") + + +def get_querystring(items: dict, exclude: List[str]) -> str: + """Turn a dictionary into a querystring, excluding the keys in the exclude list. + + Parameters + ---------- + items: dict + The dictionary to be turned into a querystring. + + exclude: List[str] + The keys to be excluded from the querystring. + + Returns + ------- + str + The querystring. + """ + for key in exclude: + items.pop(key, None) + + query_items = [] + for key, value in items.items(): + if value is None: + continue + if isinstance(value, list): + for item in value: + query_items.append(f"{key}={item}") + else: + query_items.append(f"{key}={value}") + + querystring = "&".join(query_items) + + return f"{querystring}" if querystring else "" + + +def get_python_request_settings() -> dict: + """ + Get the python settings from the system_settings.json file. + + They are read from the "http" key in the "python_settings" key in the system_settings.json file. 
+ + The configuration applies to both the requests and aiohttp libraries. + + Available settings: + - cafile: Path to a CA certificate file. + - certfile: Path to a client certificate file. + - keyfile: Path to a client key file. + - password: Password for the client key file. # aiohttp only + - verify_ssl: Verify SSL certificates. + - fingerprint: SSL fingerprint. # aiohttp only + - proxy: Proxy URL. + - proxy_auth: Proxy authentication. # aiohttp only + - proxy_headers: Proxy headers. # aiohttp only + - timeout: Request timeout. + - auth: Basic authentication. + - headers: Request headers. + - cookies: Dictionary of session cookies. + + Any additional keys supplied will be ignored. + """ + # pylint: disable=import-outside-toplevel + from openbb_core.app.service.system_service import SystemService + + python_settings = SystemService().system_settings.python_settings.model_dump() + http_settings = python_settings.get("http", {}) + allowed_keys = [ + "cafile", + "certfile", + "keyfile", + "password", + "verify_ssl", + "fingerprint", + "proxy", + "proxy_auth", + "proxy_headers", + "timeout", + "auth", + "headers", + "cookies", + ] + + return { + k: v for k, v in http_settings.items() if v is not None and k in allowed_keys + } + + +def get_requests_session(**kwargs) -> "Session": + """Get a requests session object with the applied user settings or environment variables.""" + # pylint: disable=import-outside-toplevel + import requests + + # If a session is already provided, just return it. + if "session" in kwargs and isinstance(kwargs.get("session"), requests.Session): + return kwargs["session"] + + # We want to add a user agent to the request, so check if there are any headers + # If there are headers, check if there is a user agent, if not add one. + # Some requests seem to work only with a specific user agent, so we want to be able to override it. 
+ python_settings = get_python_request_settings() + headers = kwargs.pop("headers", {}) + headers.update(python_settings.pop("headers", {})) + + if "User-Agent" not in headers: + headers["User-Agent"] = get_user_agent() + + # Allow a custom session for caching, if desired + _session: requests.Session = kwargs.pop("session", None) or requests.Session() + _session.headers.update(headers) + + if python_settings.get("verify_ssl") is False: + _session.verify = False + else: + ca_file = python_settings.get("cafile") + requests_ca_bundle = os.environ.get("REQUESTS_CA_BUNDLE") + cert = ca_file or requests_ca_bundle + if cert: + bundle = requests_ca_bundle if requests_ca_bundle != cert else None + _session.verify = combine_certificates(cert, bundle) + + if certfile := python_settings.get("certfile"): + keyfile = python_settings.get("keyfile") + _session.cert = (certfile, keyfile) if keyfile else certfile + + proxy = python_settings.get("proxy") + http_proxy = os.environ.get("HTTP_PROXY", os.environ.get("HTTPS_PROXY")) + https_proxy = os.environ.get("HTTPS_PROXY", os.environ.get("HTTP_PROXY")) + + if http_proxy is not None and http_proxy == https_proxy: + https_proxy = None + + if http_proxy or https_proxy or proxy: + proxies: dict = {} + if http := http_proxy or https_proxy or proxy: + proxies["http"] = http + if https := https_proxy or http_proxy or proxy: + proxies["https"] = https + _session.proxies = proxies + + if cookies := python_settings.get("cookies"): + _session.cookies = ( + cookies + if isinstance(cookies, requests.cookies.RequestsCookieJar) + else requests.cookies.cookiejar_from_dict(cookies) + ) + + if auth := python_settings.get("auth"): + _session.auth = ( + auth if isinstance(auth, (tuple, requests.auth.AuthBase)) else tuple(auth) + ) + + if kwargs: + for key, value in kwargs.items(): + try: + if hasattr(_session, key): + if hasattr(getattr(_session, key, None), "update"): + getattr(_session, key, {}).update(value) + else: + setattr(_session, key, value) + 
async def get_async_requests_session(**kwargs) -> ClientSession:
    """Get an aiohttp session object with the applied user settings or environment variables."""
    # pylint: disable=import-outside-toplevel
    import aiohttp  # noqa
    import atexit
    import ssl

    # If a session is already provided, just return it.
    if "session" in kwargs and isinstance(kwargs.get("session"), ClientSession):
        return kwargs["session"]
    # Handle SSL settings and proxies
    # We will accommodate the Requests environment variable for the CA bundle and HTTP Proxies, if provided.
    # The settings file will take precedence over the environment variables.
    python_settings = get_python_request_settings()
    _ = kwargs.pop("raise_for_status", None)

    proxy = python_settings.get("proxy")
    http_proxy = os.environ.get("HTTP_PROXY", os.environ.get("HTTPS_PROXY"))
    https_proxy = os.environ.get("HTTPS_PROXY", os.environ.get("HTTP_PROXY"))

    # aiohttp will attempt to upgrade the proxy to https.
    if not proxy and http_proxy is not None and http_proxy == https_proxy:
        python_settings["proxy"] = http_proxy.replace("https:", "http:")

    # If a proxy is provided, or verify_ssl is False, we don't need to handle the certificate and create SSL context.
    # This takes priority over the cafile.
    if python_settings.get("proxy") or python_settings.get("verify_ssl") is False:
        python_settings["verify_ssl"] = None
        python_settings["ssl"] = False
    elif (
        python_settings.get("certfile")
        or python_settings.get("cafile")
        or os.environ.get("REQUESTS_CA_BUNDLE")
    ):
        # Build an SSL context from the configured CA / client cert files.
        ca = python_settings.get("cafile") or os.environ.get("REQUESTS_CA_BUNDLE")
        cert = python_settings.get("certfile")
        key = python_settings.get("keyfile")
        password = python_settings.get("password")
        ssl_context = ssl.create_default_context()

        if ca:
            ssl_context.load_verify_locations(cafile=ca)

        if cert:
            ssl_context.load_cert_chain(
                certfile=cert,
                keyfile=key,
                password=password,
            )

        python_settings["ssl"] = ssl_context

    ssl_kwargs = {
        k: v
        for k, v in python_settings.items()
        if k in ["ssl", "verify_ssl", "fingerprint"] and v is not None
    }

    # Merge the updated python_settings dict with the kwargs.
    # (File-path keys are dropped — they were folded into the SSL context above.)
    if python_settings:
        kwargs.update(
            {k: v for k, v in python_settings.items() if not k.endswith("file")}
        )

    # SSL settings get passed to the TCPConnector used by the session.
    connector = kwargs.pop("connector", None) or (
        aiohttp.TCPConnector(ttl_dns_cache=300, **ssl_kwargs) if ssl_kwargs else None
    )

    conn_kwargs = {"connector": connector} if connector else {}

    # Add basic auth for proxies, if provided.
    p_auth = kwargs.pop("proxy_auth", [])
    if p_auth:
        conn_kwargs["proxy_auth"] = aiohttp.BasicAuth(
            *p_auth if isinstance(p_auth, (list, tuple)) else p_auth
        )
    # Add basic auth for server, if provided.
    s_auth = kwargs.pop("auth", [])
    if s_auth:
        conn_kwargs["auth"] = aiohttp.BasicAuth(
            *s_auth if isinstance(s_auth, (list, tuple)) else s_auth
        )
    # Add cookies to the session, if provided. A plain dict becomes per-request
    # cookies; a CookieJar instance replaces the session's jar.
    _cookies = kwargs.pop("cookies", None)
    if _cookies:
        if isinstance(_cookies, dict):
            conn_kwargs["cookies"] = _cookies
        elif isinstance(_cookies, aiohttp.CookieJar):
            conn_kwargs["cookie_jar"] = _cookies

    # Pass any remaining kwargs to the session. Note: only keys that also
    # exist in python_settings are forwarded (besides timeout).
    for k, v in kwargs.items():
        if v is None:
            continue
        if k == "timeout":
            conn_kwargs["timeout"] = (
                v
                if isinstance(v, aiohttp.ClientTimeout)
                else aiohttp.ClientTimeout(total=v)
            )
        elif k not in ("ssl", "verify_ssl", "fingerprint") and k in python_settings:
            conn_kwargs[k] = v

    _session: ClientSession = ClientSession(**conn_kwargs)

    def at_exit(session):
        """Close the session at exit if it was orphaned."""
        if not session.closed:
            run_async(session.close)

    # Register the session to close at exit
    atexit.register(at_exit, _session)

    return _session
Can be overwritten by user setting, request_timeout + response_callback : Callable[[ClientResponse, ClientSession], Awaitable[Union[dict, List[dict]]]], optional + Async callback with response and session as arguments that returns the json, by default None + session : ClientSession, optional + Custom session to use for requests, by default None + + + Returns + ------- + Union[dict, List[dict]] + Response json + """ + if method.upper() not in ["GET", "POST"]: + raise ValueError("Method must be GET or POST") + + kwargs["timeout"] = kwargs.pop("preferences", {}).get("request_timeout", timeout) + + response_callback = response_callback or ( + lambda r, _: asyncio.ensure_future(r.json()) + ) + + with_session = kwargs.pop("with_session", "session" in kwargs) + session = kwargs.pop("session", await get_async_requests_session(**kwargs)) + + try: + response = await session.request(method, url, **kwargs) + return await response_callback(response, session) + finally: + if not with_session: + await session.close() + + +async def amake_requests( + urls: Union[str, List[str]], + response_callback: Optional[ + Callable[[ClientResponse, ClientSession], Awaitable[Union[dict, List[dict]]]] + ] = None, + **kwargs, +): + """Make multiple requests asynchronously. + + Parameters + ---------- + urls : Union[str, List[str]] + List of urls to make requests to + method : Literal["GET", "POST"], optional + HTTP method to use. Can be "GET" or "POST", by default "GET" + timeout : int, optional + Timeout in seconds, by default 10. 
Can be overwritten by user setting, request_timeout + response_callback : Callable[[ClientResponse, ClientSession], Awaitable[Union[dict, List[dict]]]], optional + Async callback with response and session as arguments that returns the json, by default None + session : ClientSession, optional + Custom session to use for requests, by default None + + Returns + ------- + Union[dict, List[dict]] + Response json + """ + session = kwargs.pop("session", await get_async_requests_session(**kwargs)) + kwargs["response_callback"] = response_callback + urls = urls if isinstance(urls, list) else [urls] + + try: + results: list = [] + + for result in await asyncio.gather( + *[amake_request(url, session=session, **kwargs) for url in urls], + return_exceptions=True, + ): + is_exception = isinstance(result, Exception) + + if is_exception and kwargs.get("raise_for_status", False): + raise result # type: ignore[misc] + + if is_exception or not result: + continue + + results.extend( + result if isinstance(result, list) else [result] # type: ignore[list-item] + ) + + return results + + finally: + await session.close() + + +def combine_certificates(cert: str, bundle: Optional[str] = None) -> str: + """Combine a certificate and a bundle into a single certificate file. Use the default bundle if none is provided.""" + # pylint: disable=import-outside-toplevel + import atexit # noqa + import certifi + import shutil + from pathlib import Path + from warnings import warn + + if not Path(cert).exists(): + raise FileNotFoundError(f"Certificate file '{cert}' not found") + + if cert.split(".")[0].endswith("_combined"): + return cert + + combined_cert = cert.split(".")[0] + "_combined." 
+ cert.split(".")[1] + + if Path(combined_cert).exists(): + return combined_cert + + if not bundle: + bundle = certifi.where() + + try: + with open(combined_cert, "wb") as combined_cert_file: + # Write the default CA bundle to the combined certificate file + with open(bundle, "rb") as bundle_file: + shutil.copyfileobj(bundle_file, combined_cert_file) + + # Write the custom CA certificate to the combined certificate file + with open(cert, "rb") as cert_file: + shutil.copyfileobj(cert_file, combined_cert_file) + + # Register the combined certificate file for deletion + atexit.register(os.remove, combined_cert) + + return combined_cert + except Exception as e: # pylint: disable=broad-except + warn( + f"An error occurred while handling the certificates file -> {e.__class__.__name__}: {e}" + ) + return cert + + +def make_request( + url: str, method: str = "GET", timeout: int = 10, **kwargs +) -> "Response": + """Abstract helper to make requests from a url with potential headers and params. + + Parameters + ---------- + url : str + Url to make the request to + method : str, optional + HTTP method to use. Can be "GET" or "POST", by default "GET" + timeout : int, optional + Timeout in seconds, by default 10. Can be overwritten by user setting, request_timeout + + Returns + ------- + Response + Request response object + + Raises + ------ + ValueError + If invalid method is passed + """ + # We want to add a user agent to the request, so check if there are any headers + # If there are headers, check if there is a user agent, if not add one. + # Some requests seem to work only with a specific user agent, so we want to be able to override it. 
+ python_settings = get_python_request_settings() + headers = kwargs.pop("headers", {}) + headers.update(python_settings.pop("headers", {})) + preferences = kwargs.pop("preferences", None) + + if preferences and "request_timeout" in preferences: + timeout = preferences["request_timeout"] or timeout + elif "timeout" in python_settings: + timeout = python_settings["timeout"] + + if "User-Agent" not in headers: + headers["User-Agent"] = get_user_agent() + + # Allow a custom session for caching, if desired + _session = kwargs.pop("session", get_requests_session(**kwargs)) + + if method.upper() == "GET": + return _session.get( + url, + headers=headers, + timeout=timeout, + **kwargs, + ) + if method.upper() == "POST": + return _session.post( + url, + headers=headers, + timeout=timeout, + **kwargs, + ) + raise ValueError("Method must be GET or POST") + + +def to_snake_case(string: str) -> str: + """Convert a string to snake case.""" + import re # pylint: disable=import-outside-toplevel + + s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", string) + return ( + re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1) + .lower() + .replace(" ", "_") + .replace("__", "_") + ) + + +async def maybe_coroutine( + func: Callable[P, Union[T, Awaitable[T]]], /, *args: P.args, **kwargs: P.kwargs +) -> T: + """Check if a function is a coroutine and run it accordingly.""" + if not iscoroutinefunction(func): + return cast(T, func(*args, **kwargs)) + + return await func(*args, **kwargs) + + +def run_async( + func: Callable[P, Awaitable[T]], /, *args: P.args, **kwargs: P.kwargs +) -> T: + """Run a coroutine function in a blocking context.""" + if not iscoroutinefunction(func): + return cast(T, func(*args, **kwargs)) + + with start_blocking_portal() as portal: + try: + return portal.call(partial(func, *args, **kwargs)) + finally: + portal.call(portal.stop) + + +def filter_by_dates( + data: List[D], start_date: Optional[date] = None, end_date: Optional[date] = None +) -> List[D]: + """Filter data by dates.""" + 
if start_date is None and end_date is None: + return data + + def _filter(d: Data) -> bool: + _date = getattr(d, "date", None) + dt = _date.date() if _date and isinstance(_date, datetime) else _date + if dt: + if start_date and end_date: + return start_date <= dt <= end_date + if start_date: + return dt >= start_date + if end_date: + return dt <= end_date + return True + return False + + return list(filter(_filter, data)) + + +def safe_fromtimestamp( + timestamp: Union[float, int], tz: Optional[timezone] = None +) -> datetime: + """datetime.fromtimestamp alternative which supports negative timestamps on Windows platform.""" + if os.name == "nt" and timestamp < 0: + return datetime(1970, 1, 1, tzinfo=tz) + timedelta(seconds=timestamp) + return datetime.fromtimestamp(timestamp, tz) diff --git a/openbb_platform/core/openbb_core/provider/utils/options_chains_properties.py b/openbb_platform/core/openbb_core/provider/utils/options_chains_properties.py new file mode 100644 index 0000000000000000000000000000000000000000..0d89426dc0c3d7091e8ce01e0f131e500675bb66 --- /dev/null +++ b/openbb_platform/core/openbb_core/provider/utils/options_chains_properties.py @@ -0,0 +1,1855 @@ +"""Options Chains Properties.""" + +# pylint: disable=too-many-lines, too-many-arguments, too-many-locals, too-many-statements, too-many-positional-arguments + +from datetime import datetime +from functools import cached_property +from typing import TYPE_CHECKING, Dict, List, Literal, Optional, Union + +from openbb_core.app.model.abstract.error import OpenBBError +from openbb_core.provider.abstract.data import Data + +if TYPE_CHECKING: + from pandas import DataFrame + + +class OptionsChainsProperties(Data): + """Base Class For OptionsChainsData. + + Note: This class is not intended to be initialized directly and requires a validated instance of OptionsChainsData. 
+ """ + + @property + def last_price(self): + """The manually-set price of the underlying asset.""" + if hasattr(self, "_last_price"): + return self._last_price + return None + + @last_price.setter + def last_price(self, price: float): + """Manually set the price of the underlying asset. + + Use this property to override the underlying price returned by the provider. + + Deleting the property will revert to the provider's underlying price. + """ + self._last_price = price + + @last_price.deleter + def last_price(self): + """Delete the last price property.""" + if hasattr(self, "_last_price"): + del self._last_price + + @cached_property + def dataframe(self) -> "DataFrame": + """Return all data as a Pandas DataFrame, + with additional computed columns (Breakeven, GEX, DEX) if available. + """ + # pylint: disable=import-outside-toplevel + from numpy import nan + from pandas import DataFrame, DatetimeIndex, Timedelta, concat, to_datetime + + chains_data = DataFrame( + self.model_dump( + exclude_unset=True, + exclude_none=True, + ) + ) + + if "underlying_price" not in chains_data.columns and not self.last_price: + raise OpenBBError( + "'underlying_price' was not returned in the provider data." + + "\n\n Please set the 'last_price' property and try again." + + "\n\n Note: This error does not impact the standard OBBject `to_df()` method." + ) + + # Add the underlying price to the DataFrame, or override the existing price. 
+ if self.last_price: + chains_data.loc[:, "underlying_price"] = self.last_price + + if chains_data.empty: + raise OpenBBError("Error: No validated data was found.") + + if "dte" not in chains_data.columns and "eod_date" in chains_data.columns: + _date = to_datetime(chains_data.eod_date) + temp = DatetimeIndex(chains_data.expiration) + temp_ = temp - _date # type: ignore + chains_data.loc[:, "dte"] = [Timedelta(_temp_).days for _temp_ in temp_] + + if "dte" in chains_data.columns: + chains_data = DataFrame(chains_data[chains_data.dte >= 0]) + + if "dte" not in chains_data.columns and "eod_date" not in chains_data.columns: + today = datetime.today().date() + chains_data.loc[:, "dte"] = chains_data.expiration - today + + # Add the breakeven price for each option, and the DEX and GEX for each option, if available. + try: + _calls = DataFrame(chains_data[chains_data.option_type == "call"]) + _puts = DataFrame(chains_data[chains_data.option_type == "put"]) + _ask = self._identify_price_col( # pylint: disable=W0212 + chains_data, "call", "ask" + ) + _calls.loc[:, ("Breakeven")] = _calls.strike + _calls.loc[:, (_ask)] + _puts.loc[:, ("Breakeven")] = _puts.strike - _puts.loc[:, (_ask)] + if "delta" in _calls.columns: + _calls.loc[:, ("DEX")] = ( + ( + _calls.delta + * ( + _calls.contract_size + if hasattr(_calls, "contract_size") + else 100 + ) + * _calls.open_interest + * _calls.underlying_price + ) + .replace({nan: 0}) + .astype("int64") + ) + _puts.loc[:, ("DEX")] = ( + ( + _puts.delta + * ( + _puts.contract_size + if hasattr(_puts, "contract_size") + else 100 + ) + * _puts.open_interest + * _puts.underlying_price + ) + .replace({nan: 0}) + .astype("int64") + ) + + if "gamma" in _calls.columns: + _calls.loc[:, ("GEX")] = ( + ( + _calls.gamma + * ( + _calls.contract_size + if hasattr(_calls, "contract_size") + else 100 + ) + * _calls.open_interest + * (_calls.underlying_price * _calls.underlying_price) + * 0.01 + ) + .replace({nan: 0}) + .astype("int64") + ) + 
_puts.loc[:, ("GEX")] = ( + ( + _puts.gamma + * ( + _puts.contract_size + if hasattr(_puts, "contract_size") + else 100 + ) + * _puts.open_interest + * (_puts.underlying_price * _puts.underlying_price) + * 0.01 + * (-1) + ) + .replace({nan: 0}) + .astype("int64") + ) + + _calls.set_index(keys=["expiration", "strike", "option_type"], inplace=True) + _puts.set_index(keys=["expiration", "strike", "option_type"], inplace=True) + df = concat([_puts, _calls]) + df = df.sort_index().reset_index() + + return df + + except Exception: # pylint: disable=broad-exception-caught + return chains_data + + @property + def expirations(self) -> List[str]: + """Return a list of unique expiration dates, as strings.""" + return sorted([d.strftime("%Y-%m-%d") for d in list(set(self.expiration))]) # type: ignore + + @property + def strikes(self) -> List[float]: + """Return a list of unique strike prices.""" + return sorted(list(set(self.strike))) # type: ignore + + @property + def has_iv(self) -> bool: + """Return True if the data contains implied volatility.""" + return any([self.implied_volatility]) # type: ignore + + @property + def has_greeks(self) -> bool: + """Return True if the data contains greeks.""" + return any([self.delta, self.gamma, self.theta, self.vega, self.rho]) # type: ignore + + @property + def total_oi(self) -> Dict: + """Return open interest stats as a nested dictionary with keys: total, expiration, strike. + + Both, "expiration" and "strike", contain a list of records with fields: + Calls, Puts, Total, Net Percent, PCR. + """ + return self._get_stat("open_interest") + + @property + def total_volume(self) -> Dict: + """Return volume stats as a nested dictionary with keys: total, expiration, strike. + + Both, "expiration" and "strike", contain a list of records with fields: + Calls, Puts, Total, Net Percent, PCR. 
+ """ + return self._get_stat("volume") + + @property + def total_dex(self) -> Dict: + """Return Delta Dollars (DEX) as a nested dictionary with keys: total, expiration, strike. + + Both, "expiration" and "strike", contain a list of records with fields: + Calls, Puts, Total, Net Percent, PCR. + """ + if not self.has_greeks: + raise OpenBBError("Greeks are not available.") + return self._get_stat("DEX") + + @property + def total_gex(self) -> Dict: + """Return Gamma Exposure stats as a nested dictionary with keys: total, expiration, strike. + + Both, "expiration" and "strike", contain a list of records with fields: + Calls, Puts, Total, Net Percent, PCR. + """ + if not self.has_greeks: + raise OpenBBError("Greeks are not available.") + return self._get_stat("GEX") + + @staticmethod + def _identify_price_col( + df: "DataFrame", + option_type: Literal["call", "put"], + bid_ask: Literal["bid", "ask"], + ) -> str: + """Select the bid or ask price for the given option type. + This method is not intended to be called directly, + it identifies the price column where the name may vary by provider. + + Parameters + ---------- + df: DataFrame + The DataFrame containing the option data. + option_type: str + The option type to use when selecting the bid or ask price. + bid_ask: Literal["bid", "ask"] + The side of the trade to get the price for. + + Returns + ------- + str + Name of the price column to use. 
+ """ + price_col = "" + bid_fields = [ + "bid", + "last_trade_price", + "close", + "close_bid", + "prev_close", + "mark", + "settlement_price", + ] + ask_fields = [ + "ask", + "last_trade_price", + "close", + "close_ask", + "prev_close", + "mark", + "settlement_price", + ] + fields = bid_fields if bid_ask == "bid" else ask_fields + new_df = df[df["option_type"] == option_type].copy() + + for field in fields: + if field in new_df.columns: + price_col = field + break + + return price_col + + def filter_data( + self, + date: Optional[Union[str, int]] = None, + option_type: Optional[Literal["call", "put"]] = None, + moneyness: Optional[Literal["otm", "itm"]] = None, + column: Optional[str] = None, + value_min: Optional[float] = None, + value_max: Optional[float] = None, + stat: Optional[Literal["open_interest", "volume", "dex", "gex"]] = None, + by: Literal["expiration", "strike"] = "expiration", + ) -> "DataFrame": + """Return statistics by strike or expiration; or, the filtered chains data. + + Parameters + ---------- + date: Optional[Union[str, int]] + The expiration date, or days until expiry, to use. This is applied before any filters. + option_type: Optional[Literal["call", "put"]] + The option type to filter by, None returns both. + This is ignored if stat is not None. + moneyness: Optional[Literal["otm", "itm"]] + The moneyness to filter by, None returns both. + column: Optional[str] + The column to filter by. + If no min/max are supplied it will sort all data by this column, in descending order. + This is ignored if stat is not None. + value_min: Optional[float] + The minimum value to filter by. Column must be numeric. + This is ignored if stat is not None. + value_max: Optional[float] + The maximum value to filter by. Column must be numeric. + This is ignored if stat is not None. + stat: Optional[Literal["open_interest", "volume", "dex", "gex"]] + The statistical metric to filter by. + Other fields are ignored if this is not None. 
+ by: Literal["expiration", "strike"] + Filter the `stat` by expiration or strike, default is "expiration". + If a date is supplied, "strike" is always returned. + This is ignored if `stat` is None. + """ + # pylint: disable=import-outside-toplevel + from numpy import nan + from pandas import DataFrame, concat + + stats = ["open_interest", "volume", "dex", "gex"] + _stat = stat.upper() if stat in ["dex", "gex"] else stat + by = "strike" if date is not None else by + if stat is not None: + if stat not in stats: + raise OpenBBError(f"Error: stat must be one of {stats}") + if stat in ["volume", "open_interest"]: + return DataFrame(self._get_stat(stat, moneyness=moneyness, date=date)[by]).replace({nan: None}) # type: ignore + if ( + _stat not in self.dataframe.columns + and self.has_greeks + and "underlying_price" not in self.dataframe.columns + ): + raise OpenBBError( + f"Error: '{stat}' could not be generated because" + + " the underlying price was not returned by the provider." + + " Set manually with 'underlying_price' property." 
+ ) + df = DataFrame(self._get_stat(_stat, moneyness=moneyness, date=date)[by]) # type: ignore + return df.replace({nan: None}) + + df = self.dataframe + + if moneyness is not None: + df_calls = DataFrame( + df[df.strike >= df.underlying_price].query("option_type == 'call'") + ) + df_puts = DataFrame( + df[df.strike <= df.underlying_price].query("option_type == 'put'") + ) + df = concat([df_calls, df_puts]) + + if date is not None: + date = self._get_nearest_expiration(date) + df = DataFrame(df[df.expiration.astype(str) == date]) + + if option_type is not None: + df = DataFrame(df[df.option_type == option_type]) + + if column is not None: + if column not in df.columns: + raise OpenBBError(f"Error: column '{column}' not found in data") + df = DataFrame(df[df[column].notnull()]) + if value_min is not None and value_max is not None: + df = DataFrame( + df[ + (df[column].abs() >= value_min) + & (df[column].abs() <= value_max) + ] + ) + elif value_min is not None: + df = DataFrame(df[df[column].abs() >= value_min]) + elif value_max is not None: + df = DataFrame(df[df[column].abs() <= value_max]) + else: + df = DataFrame(df.sort_values(by=column, ascending=False)) + + return df.reset_index(drop=True) + + def _get_stat( + self, + metric: Literal["open_interest", "volume", "DEX", "GEX"], + moneyness: Optional[Literal["otm", "itm"]] = None, + date: Optional[str] = None, + ) -> Dict: + """Return the metric with keys: "total", "expiration", "strike". + This method is not intended to be called directly. 
+ """ + # pylint: disable=import-outside-toplevel + from numpy import inf, nan + from pandas import DataFrame, concat + + df = self.dataframe + + if metric in ["DEX", "GEX"]: + if not self.has_greeks: + raise OpenBBError("Greeks were not found within the data.") + df[metric] = abs(df[metric]) + + total_calls = df[df.option_type == "call"][metric].sum() + total_puts = df[df.option_type == "put"][metric].sum() + total_metric = total_calls + total_puts + total_metric_dict = { + "Calls": total_calls, + "Puts": total_puts, + "Total": total_metric, + "PCR": round(total_puts / total_calls, 4) if total_calls != 0 else 0, + } + + df = DataFrame(df[df[metric].notnull()]) # type: ignore + df["expiration"] = df.expiration.astype(str) + + if moneyness is not None: + df_calls = DataFrame( + df[df.strike >= df.underlying_price].query("option_type == 'call'") + if moneyness == "otm" + else df[df.strike <= df.underlying_price].query("option_type == 'call'") + ) + df_puts = DataFrame( + df[df.strike <= df.underlying_price].query("option_type == 'put'") + if moneyness == "otm" + else df[df.strike >= df.underlying_price].query("option_type == 'put'") + ) + df = concat([df_calls, df_puts]) + + if date is not None: + date = self._get_nearest_expiration(date) + df = DataFrame(df[df["expiration"].astype(str) == date]) + + by_expiration = df.groupby("expiration")[[metric]].sum()[[metric]].copy() + by_expiration = by_expiration.rename(columns={metric: "Total"}) # type: ignore + by_expiration["Calls"] = df[df.option_type == "call"].groupby("expiration")[metric].sum().copy() # type: ignore + by_expiration["Puts"] = df[df.option_type == "put"].groupby("expiration")[metric].sum().copy() # type: ignore + by_expiration["PCR"] = round(by_expiration["Puts"] / by_expiration["Calls"], 4) + by_expiration["Net Percent"] = round( + (by_expiration["Total"] / total_metric) * 100, 4 + ) + by_expiration = ( + by_expiration[["Calls", "Puts", "Total", "Net Percent", "PCR"]] + .replace({0: None, inf: None, 
nan: None}) + .dropna(how="all", axis=0) + ) + by_expiration.index.name = "Expiration" + by_expiration_dict = by_expiration.reset_index().to_dict(orient="records") + by_strike = df.groupby("strike")[[metric]].sum()[[metric]].copy() + by_strike = by_strike.rename(columns={metric: "Total"}) # type: ignore + by_strike["Calls"] = df[df.option_type == "call"].groupby("strike")[metric].sum().copy() # type: ignore + by_strike["Puts"] = df[df.option_type == "put"].groupby("strike")[metric].sum().copy() # type: ignore + by_strike["PCR"] = round(by_strike["Puts"] / by_strike["Calls"], 4) + by_strike["Net Percent"] = round((by_strike["Total"] / total_metric) * 100, 4) + by_strike = ( + by_strike[["Calls", "Puts", "Total", "Net Percent", "PCR"]] + .replace({0: None, inf: None, nan: None}) + .dropna(how="all", axis=0) + ) + by_strike.index.name = "Strike" + by_strike_dict = by_strike.reset_index().to_dict(orient="records") + + return { + "total": total_metric_dict, + "expiration": by_expiration_dict, + "strike": by_strike_dict, + } + + def _get_nearest_expiration( + self, date: Optional[Union[str, int]] = None, df: Optional["DataFrame"] = None + ) -> str: + """Return the nearest expiration date to the given date or number of days until expiry. + This method is not intended to be called directly. + + Parameters + ---------- + date: Optional[Union[str, int]] + The expiration date, or days until expiry, to use. + + Returns + ------- + str + The nearest expiration date. 
+ """ + # pylint: disable=import-outside-toplevel + from datetime import timedelta # noqa + from pandas import DataFrame, Series, to_datetime + + df = df if df is not None else self.dataframe + if isinstance(date, int): + if not hasattr(df, "dte"): + date = (datetime.today() + timedelta(days=date)).strftime("%Y-%m-%d") + else: + dataframe = df + dataframe = dataframe[dataframe.dte >= 0] + days = -1 if date == 0 else date + nearest = (dataframe.dte - days).abs().idxmin() # type: ignore + return dataframe.loc[nearest, "expiration"].strftime("%Y-%m-%d") + elif date is None: + date = to_datetime( + df.eod_date.iloc[0] + if hasattr(df, "eod_date") + else datetime.today().strftime("%Y-%m-%d") + ) # type: ignore + else: + date = to_datetime(date) # type: ignore + + expirations = Series(to_datetime(self.expirations)) # type: ignore + nearest = DataFrame(expirations - date) + nearest_exp = abs(nearest[0].astype("int64")).idxmin() + + return expirations.loc[nearest_exp].strftime("%Y-%m-%d") # type: ignore + + def _get_nearest_otm_strikes( + self, + date: Optional[Union[str, int]] = None, + underlying_price: Optional[float] = None, + moneyness: Optional[float] = None, + ) -> Dict: + """Get the nearest put and call strikes at a given percent OTM from the underlying price. + This method is not intended to be called directly. + + Parameters + ---------- + date: Optional[Union[str, int]] + The expiration date, or days until expiry, to use. + moneyness: Optional[float] + The target percent OTM, expressed as a percent between 0 and 100. Default is 0.25%. + underlying_price: Optional[float] + Only supply this is if the underlying price is not a returned field. + + Returns + ------- + Dict[str, float] + Dictionary of the upper (call) and lower (put) strike prices. 
+ """ + # pylint: disable=import-outside-toplevel + from pandas import Series + + if moneyness is None: + moneyness = 0.25 + + if 0 < moneyness < 100: + moneyness = moneyness / 100 + + if moneyness > 100 or moneyness < 0: + raise OpenBBError( + "Error: Moneyness must be expressed as a percentage between 0 and 100" + ) + + df = self.dataframe + + if underlying_price is None and not hasattr(df, "underlying_price"): + raise OpenBBError( + "Error: underlying_price must be provided if underlying_price is not available" + ) + + if date is not None: + date = self._get_nearest_expiration(date) + df = df[df.expiration.astype(str) == date] + strikes = Series(df.strike.unique().tolist()) + + last_price = ( + underlying_price + if underlying_price is not None + else df.underlying_price.iloc[0] + ) + strikes = Series(self.strikes) + + upper = last_price * (1 + moneyness) # type: ignore + lower = last_price * (1 - moneyness) # type: ignore + nearest_call = (upper - strikes).abs().idxmin() + call = strikes[nearest_call] + nearest_put = (lower - strikes).abs().idxmin() + put = strikes[nearest_put] + otm_strikes = {"call": call, "put": put} + + return otm_strikes + + def _get_nearest_strike( + self, + option_type: Literal["call", "put"], + days: Optional[Union[int, str]] = None, + strike: Optional[float] = None, + price_col: Optional[str] = None, + force_otm: bool = True, + ) -> Union[float, None]: + """ + Get the strike to the target option type, price, and number of days until expiry. + This method is not intended to be called directly. + + Parameters + ---------- + option_type: Literal["call", "put"] + The option type to use when selecting the bid or ask price. + days: int + The target number of days until expiry. Default is 30 days. + strike: float + The target strike price. Default is the last price of the underlying stock. + price_col: str + The price column to use for the calculation. + force_otm: bool + If True, the nearest OTM strike is returned. Default is True. 
+ + Returns + ------- + float + The closest strike price to the target price and number of days until expiry. + """ + # pylint: disable=import-outside-toplevel + from pandas import Series + + if option_type not in ["call", "put"]: + raise OpenBBError("Error: option_type must be either 'call' or 'put'") + + chains = self.dataframe + days = -1 if days == 0 else days + + if days is None: + days = 30 + + dte_estimate = self._get_nearest_expiration(days) + df = ( + chains[chains.expiration.astype(str) == dte_estimate] + .query("`option_type` == @option_type") + .copy() + ) + if strike is None: + strike = df.underlying_price.iloc[0] + + if price_col is not None: + df = df[df[price_col].notnull()] # type: ignore + + if df.empty or len(df) == 0: + return None + + if force_otm is False: + strikes = Series(df.strike.unique().tolist()) + nearest = (strikes - strike).abs().idxmin() + return strikes.iloc[nearest] + + nearest = ( + df[df.strike <= strike] if option_type == "put" else df[df.strike >= strike] + ) + + if nearest.empty or len(nearest) == 0: # type: ignore + return None + + nearest = ( + nearest.query("strike.idxmax()") # type: ignore + if option_type == "put" + else nearest.query("strike.idxmin()") # type: ignore + ) + + return nearest.strike + + def straddle( + self, + days: Optional[int] = None, + strike: Optional[float] = None, + underlying_price: Optional[float] = None, + ) -> "DataFrame": + """ + Calculate the cost of a straddle by DTE. Use a negative strike price for short options. + + Parameters + ---------- + days: Optional[int] + The target number of days until expiry. Default is 30 days. + strike: Optional[float] + The target strike price. Enter a negative value for short options. + Default is the last price of the underlying stock. + underlying_price: Optional[float] + Only supply this is if the underlying price is not a returned field. + + Returns + ------- + DataFrame + Pandas DataFrame with the results. 
            Strike 1 is the nearest call strike,
            Strike 2 is the nearest put strike.
        """
        # pylint: disable=import-outside-toplevel
        from numpy import inf
        from pandas import Series

        short: bool = False

        chains = self.dataframe

        if days is None:
            days = 30

        # 0 DTE maps to -1 so the expiration lookup prefers the first future expiry.
        if days == 0:
            days = -1

        dte_estimate = self._get_nearest_expiration(days)

        chains = chains[chains.expiration.astype(str) == dte_estimate]

        if not hasattr(chains, "underlying_price") and underlying_price is None:
            raise OpenBBError(
                "Error: underlying_price must be provided if underlying_price is not available"
            )
        underlying_price = (
            underlying_price
            if underlying_price is not None
            else chains.underlying_price.iloc[0]
        )

        force_otm = True

        if strike is None and not hasattr(chains, "underlying_price"):
            raise OpenBBError(
                "Error: strike must be provided if underlying_price is not available"
            )

        # An explicit strike disables the nearest-OTM snapping.
        if strike is not None:
            force_otm = False

        if strike is None:
            strike = underlying_price

        # A negative strike flags a short straddle; its magnitude is the target strike.
        if strike is not None and strike < 0:
            short = True

        strike_price = abs(strike)  # type: ignore
        # Short positions are priced at the bid, long positions at the ask.
        bid_ask = "bid" if short else "ask"
        call_price_col = self._identify_price_col(chains, "call", bid_ask)  # type: ignore
        put_price_col = self._identify_price_col(chains, "put", bid_ask)  # type: ignore
        call_strike_estimate = self._get_nearest_strike("call", days, strike_price, call_price_col, force_otm)  # type: ignore
        # If a strike price is supplied, the put strike is the same as the call strike.
        # Otherwise, the put strike is the nearest OTM put strike to the last price.

        put_strike_estimate = self._get_nearest_strike(
            "put", days, strike_price, put_price_col, force_otm
        )  # type: ignore
        call_premium = chains[chains.strike == call_strike_estimate].query(  # type: ignore
            "`option_type` == 'call'"
        )[
            call_price_col
        ]
        put_premium = chains[chains.strike == put_strike_estimate].query(  # type: ignore
            "`option_type` == 'put'"
        )[
            put_price_col
        ]
        if call_premium.empty or put_premium.empty:
            raise OpenBBError(
                "Error: No premium data found for the selected strikes."
                f" Call: {call_strike_estimate}, Put: {put_strike_estimate}"
            )
        put_premium = put_premium.values[0]
        call_premium = call_premium.values[0]
        dte = chains[chains.expiration.astype(str) == dte_estimate]["dte"].unique()[0]  # type: ignore
        straddle_cost = call_premium + put_premium  # type: ignore
        straddle_dict: Dict = {}

        # Includes the as-of date if it is historical EOD data.
        if hasattr(chains, "eod_date"):
            straddle_dict.update({"Date": chains.eod_date.iloc[0]})

        # NOTE(review): "Cost" is negated for short positions but "Cost Percent"
        # is not — confirm the sign convention is intentional.
        straddle_dict.update(
            {
                "Symbol": chains.underlying_symbol.unique()[0],
                "Underlying Price": underlying_price,
                "Expiration": dte_estimate,
                "DTE": dte,
                "Strike 1": call_strike_estimate,
                "Strike 2": put_strike_estimate,
                "Strike 1 Premium": call_premium,
                "Strike 2 Premium": put_premium,
                "Cost": straddle_cost * -1 if short else straddle_cost,
                "Cost Percent": round(
                    straddle_cost / underlying_price * 100, ndigits=4
                ),
                "Breakeven Upper": call_strike_estimate + straddle_cost,
                "Breakeven Upper Percent": round(
                    ((call_strike_estimate + straddle_cost) / underlying_price * 100)
                    - 100,
                    ndigits=4,
                ),
                "Breakeven Lower": put_strike_estimate - straddle_cost,
                "Breakeven Lower Percent": round(
                    -100
                    + (put_strike_estimate - straddle_cost) / underlying_price * 100,
                    ndigits=4,
                ),
                "Max Profit": abs(straddle_cost) if short else inf,
                "Max Loss": inf if short else straddle_cost * -1,
            }
        )
        straddle = Series(
            data=straddle_dict.values(), index=list(straddle_dict)  # type: ignore
        )
        straddle.name = "Short Straddle" if short else "Long Straddle"
        straddle.loc["Payoff Ratio"] = round(
            abs(straddle.loc["Max Profit"] / straddle.loc["Max Loss"]), ndigits=4
        )

        return straddle.to_frame()

    def strangle(
        self,
        days: Optional[int] = None,
        moneyness: Optional[float] = None,
        underlying_price: Optional[float] = None,
    ) -> "DataFrame":
        """
        Calculate the cost of a strangle by DTE and % moneyness. Use a negative value for moneyness for short options.

        Parameters
        ----------
        days: int
            The target number of days until expiry. Default is 30 days.
        moneyness: float
            The percentage of OTM moneyness, expressed as a percent between -100 < 0 < 100.
            Enter a negative number for short options. Default is 5%.
        underlying_price: Optional[float]
            Only supply this if the underlying price is not a returned field.

        Returns
        -------
        DataFrame
            Pandas DataFrame with the results.
            Strike 1 is the nearest call strike.
            Strike 2 is the nearest put strike.
+ """ + # pylint: disable=import-outside-toplevel + from numpy import inf + from pandas import Series + + if days is None: + days = 30 + + if moneyness is None: + moneyness = 5 + + short: bool = False + + if moneyness < 0: + short = True + moneyness = abs(moneyness) + + bid_ask = "bid" if short else "ask" + + chains = self.dataframe + dte_estimate = self._get_nearest_expiration(days) + chains = chains[chains["expiration"].astype(str) == dte_estimate] + call_price_col = self._identify_price_col(chains, "call", bid_ask) # type: ignore + put_price_col = self._identify_price_col(chains, "put", bid_ask) # type: ignore + + if underlying_price is None and not hasattr(chains, "underlying_price"): + raise OpenBBError( + "Error: underlying_price must be provided if underlying_price is not available" + ) + + underlying_price = ( + underlying_price + if underlying_price is not None + else chains.underlying_price.iloc[0] + ) + + strikes = self._get_nearest_otm_strikes( + dte_estimate, underlying_price, moneyness + ) + call_strike_estimate = self._get_nearest_strike( + "call", days, strikes.get("call"), call_price_col, force_otm=False + ) + put_strike_estimate = self._get_nearest_strike( + "put", days, strikes.get("put"), put_price_col, force_otm=False + ) + call_premium = chains[chains.strike == call_strike_estimate].query( # type: ignore + "`option_type` == 'call'" + )[ + call_price_col + ] + put_premium = chains[chains.strike == put_strike_estimate].query( # type: ignore + "`option_type` == 'put'" + )[ + put_price_col + ] + + if call_premium.empty or put_premium.empty: + raise OpenBBError( + "Error: No premium data found for the selected strikes." 
+ f" Call: {call_strike_estimate}, Put: {put_strike_estimate}" + ) + put_premium = put_premium.values[0] + call_premium = call_premium.values[0] + + dte = chains[chains.expiration.astype(str) == dte_estimate]["dte"].unique()[0] # type: ignore + strangle_cost = call_premium + put_premium + underlying_price = ( + underlying_price + if underlying_price is not None + else chains.underlying_price.iloc[0] + ) + strangle_dict: Dict = {} + # Includes the as-of date if it is historical EOD data. + if hasattr(chains, "eod_date"): + strangle_dict.update({"Date": chains.eod_date.iloc[0]}) + + strangle_dict.update( + { + "Symbol": chains.underlying_symbol.unique()[0], + "Underlying Price": underlying_price, + "Expiration": dte_estimate, + "DTE": dte, + "Strike 1": call_strike_estimate, + "Strike 2": put_strike_estimate, + "Strike 1 Premium": call_premium, + "Strike 2 Premium": put_premium, + "Cost": strangle_cost * -1 if short else strangle_cost, + "Cost Percent": round( + strangle_cost / underlying_price * 100, ndigits=4 + ), + "Breakeven Upper": call_strike_estimate + strangle_cost, + "Breakeven Upper Percent": round( + ((call_strike_estimate + strangle_cost) / underlying_price * 100) + - 100, + ndigits=4, + ), + "Breakeven Lower": put_strike_estimate - strangle_cost, + "Breakeven Lower Percent": round( + ( + -100 + + (put_strike_estimate - strangle_cost) / underlying_price * 100 + ), + ndigits=4, + ), + "Max Profit": abs(strangle_cost) if short else inf, + "Max Loss": inf if short else strangle_cost * -1, + } + ) + strangle = Series( + data=strangle_dict.values(), + index=list(strangle_dict), # type: ignore + ) + strangle.name = "Short Strangle" if short else "Long Strangle" + strangle.loc["Payoff Ratio"] = round( + abs(strangle.loc["Max Profit"] / strangle.loc["Max Loss"]), ndigits=4 + ) + + return strangle.to_frame() + + def vertical_call_spread( + self, + days: Optional[int] = None, + sold: Optional[float] = None, + bought: Optional[float] = None, + underlying_price: 
Optional[float] = None,
    ) -> "DataFrame":
        """
        Calculate the vertical call spread for the target DTE.
        A bull call spread is when the sold strike is above the bought strike.

        Parameters
        ----------
        days: int
            The target number of days until expiry. This value will be used to get the nearest valid DTE.
            Default is 30 days.
        sold: float
            The target strike price for the short leg of the vertical call spread.
            Default is 7.5% above the last price of the underlying.
        bought: float
            The target strike price for the long leg of the vertical call spread.
            Default is 2.5% above the last price of the underlying.
        underlying_price: Optional[float]
            Only supply this if the underlying price is not a returned field.

        Returns
        -------
        DataFrame
            Pandas DataFrame with the results.
            Strike 1 is the sold call strike.
            Strike 2 is the bought call strike.
        """
        # pylint: disable=import-outside-toplevel
        from numpy import nan
        from pandas import DataFrame, Series

        chains = self.dataframe

        if not hasattr(chains, "underlying_price") and underlying_price is None:
            raise OpenBBError(
                "Error: underlying_price must be provided if underlying_price is not available"
            )

        if days is None:
            days = 30

        # 0 DTE maps to -1 so the expiration lookup prefers the first future expiry.
        if days == 0:
            days = -1

        dte_estimate = self._get_nearest_expiration(days)

        chains = chains[chains["expiration"].astype(str) == dte_estimate].query(
            "`option_type` == 'call'"
        )

        last_price = (
            underlying_price
            if underlying_price is not None
            else chains.underlying_price.iloc[0]
        )

        if bought is None:
            bought = last_price * 1.0250

        if sold is None:
            sold = last_price * 1.0750

        # Short leg is priced at the bid, long leg at the ask.
        bid = self._identify_price_col(chains, "call", "bid")
        ask = self._identify_price_col(chains, "call", "ask")
        sold = self._get_nearest_strike("call", days, sold, bid, False)
        bought = self._get_nearest_strike("call", days, bought, ask, False)

        sold_premium = chains[chains.strike == sold][bid].iloc[0] * (-1)  # type: ignore
        bought_premium = chains[chains.strike == bought][ask].iloc[0]  # type: ignore
        dte = chains[chains.expiration.astype(str) == dte_estimate]["dte"].unique()[0]  # type: ignore
        spread_cost = bought_premium + sold_premium
        breakeven_price = bought + spread_cost
        max_profit = sold - bought - spread_cost  # type: ignore
        call_spread_: Dict = {}
        # A degenerate spread (same strike both legs, or zero net cost) returns empty.
        if sold != bought and spread_cost != 0:
            # Includes the as-of date if it is historical EOD data.
            if hasattr(chains, "eod_date"):
                call_spread_.update({"Date": chains.eod_date.iloc[0]})

            call_spread_.update(
                {
                    "Symbol": chains.underlying_symbol.unique()[0],
                    "Underlying Price": last_price,
                    "Expiration": dte_estimate,
                    "DTE": dte,
                    "Strike 1": sold,
                    "Strike 2": bought,
                    "Strike 1 Premium": sold_premium,
                    "Strike 2 Premium": bought_premium,
                    "Cost": spread_cost,
                    "Cost Percent": round(spread_cost / last_price * 100, ndigits=4),
                    "Breakeven Lower": breakeven_price,
                    "Breakeven Lower Percent": round(
                        (breakeven_price / last_price * 100) - 100, ndigits=4
                    ),
                    "Breakeven Upper": nan,
                    "Breakeven Upper Percent": nan,
                    "Max Profit": max_profit,
                    "Max Loss": spread_cost * -1,
                }
            )
            call_spread = Series(
                data=call_spread_.values(), index=list(call_spread_)  # type: ignore
            )
            call_spread.name = "Bull Call Spread"

            # A net credit means the sold strike is below the bought strike: bear spread.
            if call_spread.loc["Cost"] < 0:
                call_spread.loc["Max Profit"] = call_spread.loc["Cost"] * -1
                call_spread.loc["Max Loss"] = -1 * (
                    bought - sold + call_spread.loc["Cost"]  # type: ignore
                )
                lower = bought if sold > bought else sold  # type: ignore
                call_spread.loc["Breakeven Upper"] = (
                    lower + call_spread.loc["Max Profit"]
                )
                # NOTE(review): this percent is computed from `breakeven_price`
                # (the bull-spread breakeven) rather than the Breakeven Upper
                # value assigned just above — confirm which is intended.
                call_spread.loc["Breakeven Upper Percent"] = round(
                    (breakeven_price / last_price * 100) - 100, ndigits=4
                )
                call_spread.loc["Breakeven Lower"] = nan
                call_spread.loc["Breakeven Lower Percent"] = nan
                call_spread.name = "Bear Call Spread"

            call_spread.loc["Payoff Ratio"] = round(
                abs(call_spread.loc["Max Profit"] / call_spread.loc["Max Loss"]),
                ndigits=4,
            )

            return call_spread.to_frame()

        return DataFrame()

    def vertical_put_spread(
        self,
        days: Optional[int] = None,
        sold: Optional[float] = None,
        bought: Optional[float] = None,
        underlying_price: Optional[float] = None,
    ) -> "DataFrame":
        """
        Calculate the vertical put spread for the target DTE.
        A bear put spread is when the bought strike is above the sold strike.

        Parameters
        ----------
        days: int
            The target number of days until expiry. This value will be used to get the nearest valid DTE.
            Default is 30 days.
        sold: float
            The target strike price for the short leg of the vertical put spread.
            Default is 7.5% below the last price of the underlying.
        bought: float
            The target strike price for the long leg of the vertical put spread.
            Default is 2.5% below the last price of the underlying.
        underlying_price: Optional[float]
            Only supply this if the underlying price is not a returned field.

        Returns
        -------
        DataFrame
            Pandas DataFrame with the results.
            Strike 1 is the sold strike.
            Strike 2 is the bought strike.
+ """ + # pylint: disable=import-outside-toplevel + from numpy import nan + from pandas import DataFrame, Series + + chains = self.dataframe + + if not hasattr(chains, "underlying_price") and underlying_price is None: + raise OpenBBError( + "Error: underlying_price must be provided if underlying_price is not available" + ) + + if days is None: + days = 30 + + if days == 0: + days = -1 + + dte_estimate = self._get_nearest_expiration(days) + + chains = chains[chains["expiration"].astype(str) == dte_estimate].query( + "`option_type` == 'put'" + ) + + last_price = ( + underlying_price + if underlying_price is not None + else chains.underlying_price.iloc[0] + ) + + if bought is None: + bought = last_price * 0.9750 + + if sold is None: + sold = last_price * 0.9250 + + bid = self._identify_price_col(chains, "put", "bid") + ask = self._identify_price_col(chains, "put", "ask") + sold = self._get_nearest_strike("put", days, sold, bid, False) + bought = self._get_nearest_strike("put", days, bought, ask, False) + + sold_premium = chains[chains.strike == sold][bid].iloc[0] * (-1) # type: ignore + bought_premium = chains[chains.strike == bought][ask].iloc[0] # type: ignore + dte = chains[chains.expiration.astype(str) == dte_estimate]["dte"].unique()[0] # type: ignore + spread_cost = bought_premium + sold_premium + max_profit = abs(spread_cost) + breakeven_price = sold - max_profit + max_loss = (sold - bought - max_profit) * -1 # type: ignore + put_spread_: Dict = {} + if sold != bought and max_loss != 0: + # Includes the as-of date if it is historical EOD data. 
+ if hasattr(chains, "eod_date"): + put_spread_.update({"Date": chains.eod_date.iloc[0]}) + + put_spread_.update( + { + "Symbol": chains.underlying_symbol.unique()[0], + "Underlying Price": last_price, + "Expiration": dte_estimate, + "DTE": dte, + "Strike 1": sold, + "Strike 2": bought, + "Strike 1 Premium": sold_premium, + "Strike 2 Premium": bought_premium, + "Cost": spread_cost, + "Cost Percent": round(max_profit / last_price * 100, ndigits=4), + "Breakeven Lower": nan, + "Breakeven Lower Percent": nan, + "Breakeven Upper": breakeven_price, + "Breakeven Upper Percent": ( + 100 - round((breakeven_price / last_price) * 100, ndigits=4) + ), + "Max Profit": max_profit, + "Max Loss": max_loss, + } + ) + + put_spread = Series(data=put_spread_.values(), index=put_spread_) + put_spread.name = "Bull Put Spread" + if put_spread.loc["Cost"] > 0: + put_spread.loc["Max Profit"] = bought - sold - spread_cost # type: ignore + put_spread.loc["Max Loss"] = spread_cost * (-1) + put_spread.loc["Breakeven Lower"] = bought - spread_cost + put_spread.loc["Breakeven Lower Percent"] = 100 - round( + (breakeven_price / last_price) * 100, ndigits=4 + ) + put_spread.loc["Breakeven Upper"] = nan + put_spread.loc["Breakeven Upper Percent"] = nan + put_spread.name = "Bear Put Spread" + + put_spread.loc["Payoff Ratio"] = round( + abs(put_spread.loc["Max Profit"] / put_spread.loc["Max Loss"]), + ndigits=4, + ) + + return put_spread.to_frame() + + return DataFrame() + + def synthetic_long( + self, + days: Optional[int] = 30, + strike: float = 0, + underlying_price: Optional[float] = None, + ) -> "DataFrame": + """ + Calculate the cost of a synthetic long position at a given strike. + It is expressed as the difference between a bought call and a sold put. + + Parameters + ----------- + days: int + The target number of days until expiry. Default is 30 days. + strike: float + The target strike price. Default is the last price of the underlying stock. 
+ underlying_price: Optional[float] + Only supply this is if the underlying price is not a returned field. + + Returns + ------- + DataFrame + Pandas DataFrame with the results. + Strike1 is the purchased call strike. + Strike2 is the sold put strike. + """ + # pylint: disable=import-outside-toplevel + from numpy import inf, nan + from pandas import DataFrame + + chains = self.dataframe + + if not hasattr(chains, "underlying_price") and underlying_price is None: + raise OpenBBError( + "Error: underlying_price must be provided if underlying_price is not available" + ) + + if days is None: + days = 30 + + if days == 0: + days = -1 + + dte_estimate = self._get_nearest_expiration(days) + chains = DataFrame(chains[chains["expiration"].astype(str) == dte_estimate]) + last_price = ( + underlying_price + if underlying_price is not None + else chains.underlying_price.iloc[0] + ) + bid = self._identify_price_col(chains, "put", "bid") + ask = self._identify_price_col(chains, "call", "ask") + strike_price = last_price if strike == 0 else strike + sold = self._get_nearest_strike("put", days, strike_price, bid, False) + bought = self._get_nearest_strike("call", days, strike_price, ask, False) + put_premium = chains[chains.strike == sold].query("`option_type` == 'put'")[bid] # type: ignore + call_premium = chains[chains.strike == bought].query("`option_type` == 'call'")[ask] # type: ignore + + if call_premium.empty or put_premium.empty: + raise OpenBBError( + "Error: No premium data found for the selected strikes." + f" Call: {bought}, Put: {sold}" + ) + + put_premium = put_premium.values[0] * (-1) + call_premium = call_premium.values[0] + dte = chains[chains.expiration.astype(str) == dte_estimate]["dte"].unique()[0] # type: ignore + position_cost = call_premium + put_premium + breakeven = ((sold + bought) / 2) + position_cost # type: ignore + synthetic_long_dict: Dict = {} + # Includes the as-of date if it is historical EOD data. 
+ if hasattr(chains, "eod_date"): + synthetic_long_dict.update({"Date": chains.eod_date.iloc[0]}) + + synthetic_long_dict.update( + { + "Symbol": chains.underlying_symbol.unique()[0], + "Underlying Price": last_price, + "Expiration": dte_estimate, + "DTE": dte, + "Strike 1": sold, + "Strike 2": bought, + "Strike 1 Premium": call_premium, + "Strike 2 Premium": put_premium, + "Cost": position_cost, + "Cost Percent": round(position_cost / last_price * 100, ndigits=4), + "Breakeven Lower": nan, + "Breakeven Lower Percent": nan, + "Breakeven Upper": breakeven, + "Breakeven Upper Percent": round( + ((breakeven - last_price) / last_price) * 100, ndigits=4 + ), + "Max Profit": inf, + "Max Loss": breakeven * (-1), + } + ) + + synthetic_long = DataFrame( + data=synthetic_long_dict.values(), index=list(synthetic_long_dict) # type: ignore + ).rename(columns={0: "Synthetic Long"}) + + return synthetic_long + + def synthetic_short( + self, + days: Optional[int] = None, + strike: float = 0, + underlying_price: Optional[float] = None, + ) -> "DataFrame": + """ + Calculate the cost of a synthetic short position at a given strike. + It is expressed as the difference between a sold call and a purchased put. + + Parameters + ----------- + days: int + The target number of days until expiry. Default is 30 days. + strike: float + The target strike price. Default is the last price of the underlying stock. + underlying_price: Optional[float] + Only supply this is if the underlying price is not a returned field. + + Returns + ------- + DataFrame + Pandas DataFrame with the results. + Strike 1 is the sold call strike. + Strike 2 is the purchased put strike. 
+ """ + # pylint: disable=import-outside-toplevel + from numpy import inf, nan + from pandas import DataFrame + + chains = self.dataframe + + if not hasattr(chains, "underlying_price") and underlying_price is None: + raise OpenBBError( + "Error: underlying_price must be provided if underlying_price is not available" + ) + + if days is None: + days = 30 + + if days == 0: + days = -1 + + dte_estimate = self._get_nearest_expiration(days) + chains = DataFrame(chains[chains["expiration"].astype(str) == dte_estimate]) + last_price = ( + underlying_price + if underlying_price is not None + else chains.underlying_price.iloc[0] + ) + bid = self._identify_price_col(chains, "call", "bid") + ask = self._identify_price_col(chains, "put", "ask") + strike_price = last_price if strike == 0 else strike + sold = self._get_nearest_strike("call", days, strike_price, bid, False) + bought = self._get_nearest_strike("put", days, strike_price, ask, False) + put_premium = chains[chains.strike == bought].query("`option_type` == 'put'")[ask] # type: ignore + call_premium = chains[chains.strike == sold].query("`option_type` == 'call'")[bid] # type: ignore + + if call_premium.empty or put_premium.empty: + raise OpenBBError( + "Error: No premium data found for the selected strikes." + f" Call: {bought}, Put: {sold}" + ) + + put_premium = put_premium.values[0] + call_premium = call_premium.values[0] * (-1) + dte = chains[chains.expiration.astype(str) == dte_estimate]["dte"].unique()[0] # type: ignore + position_cost = call_premium + put_premium + breakeven = ((sold + bought) / 2) + position_cost # type: ignore + synthetic_short_dict: Dict = {} + # Includes the as-of date if it is historical EOD data. 
+ if hasattr(chains, "eod_date"): + synthetic_short_dict.update({"Date": chains.eod_date.iloc[0]}) + + synthetic_short_dict.update( + { + "Symbol": chains.underlying_symbol.unique()[0], + "Underlying Price": last_price, + "Expiration": dte_estimate, + "DTE": dte, + "Strike 1": sold, + "Strike 2": bought, + "Strike 1 Premium": call_premium, + "Strike 2 Premium": put_premium, + "Cost": position_cost, + "Cost Percent": round(position_cost / last_price * 100, ndigits=4), + "Breakeven Lower": breakeven, + "Breakeven Lower Percent": round( + ((breakeven - last_price) / last_price) * 100, ndigits=4 + ), + "Breakeven Upper": nan, + "Breakeven Upper Percent": nan, + "Max Profit": breakeven, + "Max Loss": inf, + } + ) + + synthetic_short = DataFrame( + data=synthetic_short_dict.values(), index=list(synthetic_short_dict) # type: ignore + ).rename(columns={0: "Synthetic Short"}) + + return synthetic_short + + # pylint: disable=too-many-branches + def strategies( # noqa: PLR0912 + self, + days: Optional[List] = None, + straddle_strike: Optional[float] = None, + strangle_moneyness: Optional[List[float]] = None, + synthetic_longs: Optional[List[float]] = None, + synthetic_shorts: Optional[List[float]] = None, + vertical_calls: Optional[List[tuple]] = None, + vertical_puts: Optional[List[tuple]] = None, + underlying_price: Optional[float] = None, + ) -> "DataFrame": + """ + Get options strategies for all, or a list of, DTE(s). + Currently supports straddles, strangles, synthetic long and shorts, and vertical spreads. + + Multiple strategies, expirations, and % moneyness can be returned. + + A negative value for `straddle_strike` or `strangle_moneyness` returns short options. + + A synthetic long/short position is a bought/sold call and sold/bought put at the same strike. + + A sold call strike that is lower than the bought strike, + or a sold put strike that is higher than the bought strike, + is a bearish vertical spread. 
+ + The default state returns a long straddle for each expiry. + + Parameters + ---------- + days: list[int] + List of DTE(s) to get strategies for. Enter a single value, or multiple as a list. + Select all dates by entering, -1. Large chains may take a few seconds to process all dates. + Defaults to [20,40,60,90,180,360]. + straddle_strike: float + The target strike price for the straddle. Defaults to the last price of the underlying stock, + and both strikes will always be on OTM side. + Enter a strike price to force call and put strikes to be the same. + strangle_moneyness: List[float] + List of OTM moneyness to target, expressed as a percent value between 0 and 100. + Enter a single value, or multiple as a list. + synthetic_longs: List[float] + List of strikes for a synthetic long position. + synthetic_shorts: List[float] + List of strikes for a synthetic short position. + vertical_calls: List[tuple] + Call strikes for vertical spreads, entered as a list of paired tuples - [(sold strike, bought strike)]. + vertical_puts: List[tuple] + Put strikes for vertical spreads, entered as a list of paired tuples - [(sold strike, bought strike)]. + underlying_price: Optional[float] + Only supply this if the underlying price is not a returned field. + + Returns + ------- + DataFrame + Pandas DataFrame with the results. 
+ """ + # pylint: disable=import-outside-toplevel + from pandas import DataFrame, concat + + def to_clean_list(x): + if x is None: + return None + return [x] if not isinstance(x, list) else x + + def split_into_tuples(x): + """Split a list into paired tuples.""" + if x is None: + return None + if isinstance(x, tuple): + return [x] + if isinstance(x, list) and isinstance(x[0], tuple): + return x + paired_tuples: List = [] + for i in range(0, len(x), 2): + paired_tuples.append((x[i], x[i + 1])) + return paired_tuples + + # Check if all items are False + if ( # pylint: disable=too-many-boolean-expressions + straddle_strike is None + and strangle_moneyness is None + and synthetic_longs is None + and synthetic_shorts is None + and vertical_calls is None + and vertical_puts is None + ): + straddle_strike = 0 + + chains = self.dataframe + bid = self._identify_price_col(chains, "call", "bid") + chains = chains[chains[bid].notnull()].query("`dte` >= 0") + days = ( + chains.dte.unique().tolist() + if days == -1 + else days if days else [20, 40, 60, 90, 180, 360] + ) + # Allows a single input to be passed instead of a list. 
+ days = [days] if isinstance(days, int) else days # type: ignore[list-item] + + strangle_moneyness = strangle_moneyness or [0.0] + strangle_moneyness = to_clean_list(strangle_moneyness) # type: ignore + synthetic_longs = to_clean_list(synthetic_longs) # type: ignore + synthetic_shorts = to_clean_list(synthetic_shorts) # type: ignore + vertical_calls = split_into_tuples(vertical_calls) # type: ignore + vertical_puts = split_into_tuples(vertical_puts) # type: ignore + + days_list: List = [] + strategies: DataFrame = DataFrame() + straddles: DataFrame = DataFrame() + strangles: DataFrame = DataFrame() + strangles_: DataFrame = DataFrame() + synthetic_longs_df: DataFrame = DataFrame() + _synthetic_longs: DataFrame = DataFrame() + synthetic_shorts_df: DataFrame = DataFrame() + _synthetic_shorts: DataFrame = DataFrame() + call_spreads: DataFrame = DataFrame() + put_spreads: DataFrame = DataFrame() + + # Get the nearest expiration date for each supplied date and + # discard any duplicates found - i.e, [29,30] will yield only one result. 
+ for day in days: # type: ignore + _day = day or -1 + days_list.append(self._get_nearest_expiration(_day)) + days = sorted(set(days_list)) + + if vertical_calls is not None: + for c in vertical_calls: + c_strike1 = c[0] + c_strike2 = c[1] + for day in days: + call_spread = self.vertical_call_spread( + day, c_strike1, c_strike2, underlying_price + ) + if not call_spread.empty: + call_spreads = concat([call_spreads, call_spread.transpose()]) + + if vertical_puts: + for c in vertical_puts: + p_strike1 = c[0] + p_strike2 = c[1] + for day in days: + put_spread = self.vertical_put_spread( + day, p_strike1, p_strike2, underlying_price + ) + if not put_spread.empty: + put_spreads = concat([put_spreads, put_spread.transpose()]) + + if straddle_strike or straddle_strike == 0: + straddle_strike = None if straddle_strike == 0 else straddle_strike + for day in days: + straddle = self.straddle( + day, straddle_strike, underlying_price + ).transpose() + if not straddle.empty and straddle.iloc[0]["Cost"] != 0: + straddles = concat([straddles, straddle]) + + if strangle_moneyness and strangle_moneyness[0] != 0: + for day in days: + for moneyness in strangle_moneyness: + strangle = self.strangle( + day, moneyness, underlying_price + ).transpose() + if strangle.iloc[0]["Cost"] != 0: + strangles_ = concat([strangles_, strangle]) + + strangles = concat([strangles, strangles_]) + strangles = strangles.query("`Strike 1` != `Strike 2`").drop_duplicates() + + if synthetic_longs: + strikes = synthetic_longs + for day in days: + for strike in strikes: + _synthetic_long = self.synthetic_long( + day, strike, underlying_price + ).transpose() + if ( + not _synthetic_long.empty + and _synthetic_long.iloc[0]["Strike 1 Premium"] != 0 + ): + _synthetic_longs = concat([_synthetic_longs, _synthetic_long]) + + synthetic_longs_df = concat([synthetic_longs_df, _synthetic_longs]) + + if synthetic_shorts: + strikes = synthetic_shorts + for day in days: + for strike in strikes: + _synthetic_short = 
self.synthetic_short( + day, strike, underlying_price + ).transpose() + if ( + not _synthetic_short.empty + and _synthetic_short.iloc[0]["Strike 1 Premium"] != 0 + ): + _synthetic_shorts = concat( + [_synthetic_shorts, _synthetic_short] + ) + + if not _synthetic_shorts.empty: + synthetic_shorts_df = concat([synthetic_shorts_df, _synthetic_shorts]) + + strategies = concat( + [ + straddles, + strangles, + synthetic_longs_df, + synthetic_shorts_df, + call_spreads, + put_spreads, + ] + ) + + if strategies.empty: + raise OpenBBError("No strategies found for the given parameters.") + + strategies = strategies.reset_index().rename(columns={"index": "Strategy"}) + strategies = ( + strategies.set_index(["Expiration", "DTE"]) + .sort_index() + .drop(columns=["Symbol"]) + ) + return strategies.reset_index() + + def skew( + self, + date: Optional[Union[str, int]] = None, + moneyness: Optional[float] = None, + underlying_price: Optional[float] = None, + ) -> "DataFrame": + """Return skewness of the options, either vertical or horizontal. + + The vertical skew for each expiry and option is calculated by subtracting the IV of the ATM call or put. + Returns only where the IV is greater than 0. + + Horizontal skew is returned if a value for moneyness is supplied. + It is expressed as the difference between skews of two equidistant OTM strikes (the closest call and put). + + Default state is 20% moneyness with 30 days until expiry. + + Parameters + ----------- + date: Optional[Union[str, int]] + The expiration date, or days until expiry, to use. Enter -1 for all expirations. + Large chains (SPY, SPX, etc.) may take a few seconds to process when using -1. + moneyness: float + The moneyness to target for calculating horizontal skew. + underlying_price: Optional[float] + Only supply this is if the underlying price is not a returned field. + + Returns + -------- + DataFrame + Pandas DataFrame with the results. 
+ """ + # pylint: disable=import-outside-toplevel + from pandas import DataFrame, concat + + data = self.dataframe + expiration: str = "" + if self.has_iv is False: + raise OpenBBError("Error: 'implied_volatility' field not found.") + + data = DataFrame(data[data.implied_volatility > 0]) # type: ignore + call_price_col = self._identify_price_col(data, "call", "ask") + put_price_col = self._identify_price_col(data, "put", "ask") + + if not hasattr(data, "underlying_price") and underlying_price is None: + raise OpenBBError( + "Error: underlying_price must be provided if underlying_price is not available" + ) + + if moneyness is not None and date is None: + date = -1 + + if moneyness is None and date is None: + date = 30 + moneyness = 20 + + if date is None: + date = 30 # type: ignore + + if date == -1: + date = None + + if date is not None: + if date not in self.expirations: + expiration = self._get_nearest_expiration(date, df=data) + data = data[data.expiration.astype(str) == expiration] + + days = data.dte.unique().tolist() # type: ignore + + call_skew = DataFrame() + put_skew = DataFrame() + skew_df = DataFrame() + puts = DataFrame() + calls = DataFrame() + + # Horizontal skew + if moneyness is not None: + atm_call_iv = DataFrame() + atm_put_iv = DataFrame() + for day in days: + strikes = self._get_nearest_otm_strikes( + date=day, moneyness=moneyness, underlying_price=underlying_price + ) + atm_call_strike = self._get_nearest_strike( # noqa:F841 + "call", day, underlying_price, call_price_col, False + ) + call_strike = self._get_nearest_strike( # noqa:F841 + "call", day, strikes["call"], call_price_col, False + ) + _calls = ( + data[data.dte == day] + .query("`option_type` == 'call'") # type: ignore + .copy() + ) + last_price = ( + underlying_price + if underlying_price is not None + else _calls.underlying_price.iloc[0] + ) + if len(_calls) > 0: + call_iv = _calls[_calls.strike == call_strike][ + ["expiration", "strike", "implied_volatility"] + ] + atm_call = 
_calls[_calls.strike == atm_call_strike][ + ["expiration", "strike", "implied_volatility"] + ] + if len(atm_call) > 0: + calls = concat([calls, call_iv]) # type: ignore + atm_call_iv = concat([atm_call_iv, atm_call]) # type: ignore + + atm_put_strike = self._get_nearest_strike( # noqa:F841 + "put", day, last_price, put_price_col, False + ) + put_strike = self._get_nearest_strike( # noqa:F841 + "put", day, strikes["put"], put_price_col, False + ) + _puts = ( + data[data.dte == day] + .query("`option_type` == 'put'") # type: ignore + .copy() + ) + if len(_puts) > 0: + put_iv = _puts[_puts.strike == put_strike][ + ["expiration", "strike", "implied_volatility"] + ] + atm_put = _puts[_puts.strike == atm_put_strike][ + ["expiration", "strike", "implied_volatility"] + ] + if len(atm_put) > 0: # type: ignore + puts = concat([puts, put_iv]) # type: ignore + atm_put_iv = concat([atm_put_iv, atm_put]) # type: ignore + + if calls.empty or puts.empty: + raise OpenBBError( + "Error: Not enough information to complete the operation." + " Likely due to zero values in the IV field of the expiration." 
+ ) + + calls = calls.drop_duplicates(subset=["expiration"]).set_index("expiration") # type: ignore + atm_call_iv = atm_call_iv.drop_duplicates(subset=["expiration"]).set_index( # type: ignore + "expiration" + ) + puts = puts.drop_duplicates(subset=["expiration"]).set_index("expiration") # type: ignore + atm_put_iv = atm_put_iv.drop_duplicates(subset=["expiration"]).set_index( # type: ignore + "expiration" + ) + skew_df["Call Strike"] = calls["strike"] + skew_df["Call IV"] = calls["implied_volatility"] + skew_df["Call ATM IV"] = atm_call_iv["implied_volatility"] + skew_df["Call Skew"] = skew_df["Call IV"] - skew_df["Call ATM IV"] + skew_df["Put Strike"] = puts["strike"] + skew_df["Put IV"] = puts["implied_volatility"] + skew_df["Put ATM IV"] = atm_put_iv["implied_volatility"] + skew_df["Put Skew"] = skew_df["Put IV"] - skew_df["Put ATM IV"] + skew_df["ATM Skew"] = skew_df["Call ATM IV"] - skew_df["Put ATM IV"] + skew_df["IV Skew"] = skew_df["Call Skew"] - skew_df["Put Skew"] + skew_df = skew_df.reset_index().rename(columns={"expiration": "Expiration"}) + skew_df["Expiration"] = skew_df["Expiration"].astype(str) + + return skew_df + + # Vertical skew + + calls = data[data.option_type == "call"] + puts = data[data.option_type == "put"] + + for day in days: + atm_call_strike = self._get_nearest_strike( + "call", day, underlying_price, force_otm=False + ) # noqa:F841 + _calls = calls[calls["dte"] == day][ + ["expiration", "option_type", "strike", "implied_volatility"] + ] + + if len(_calls) > 0: + call = _calls.set_index("expiration").copy() # type: ignore + call_atm_iv = call.query("`strike` == @atm_call_strike")[ + "implied_volatility" + ] + if len(call_atm_iv) > 0: + call["ATM IV"] = call_atm_iv.iloc[0] + call["Skew"] = call["implied_volatility"] - call["ATM IV"] + call_skew = concat([call_skew, call]) + + atm_put_strike = self._get_nearest_strike( + "put", day, force_otm=False + ) # noqa:F841 + _puts = puts[puts["dte"] == day][ + ["expiration", "option_type", 
"strike", "implied_volatility"] + ] + + if len(_puts) > 0: + put = _puts.set_index("expiration").copy() # type: ignore + put_atm_iv = put.query("`strike` == @atm_put_strike")[ + "implied_volatility" + ] + if len(put_atm_iv) > 0: + put["ATM IV"] = put_atm_iv.iloc[0] + put["Skew"] = put["implied_volatility"] - put["ATM IV"] + put_skew = concat([put_skew, put]) + if call_skew.empty or put_skew.empty: + raise OpenBBError( + "Error: Not enough information to complete the operation. Likely due to zero values in the IV field." + ) + call_skew = call_skew.set_index(["strike", "option_type"], append=True) + put_skew = put_skew.set_index(["strike", "option_type"], append=True) + skew_df = concat([call_skew, put_skew]).sort_index().reset_index() + cols = ["Expiration", "Strike", "Option Type", "IV", "ATM IV", "Skew"] + skew_df.columns = cols + skew_df["Expiration"] = skew_df["Expiration"].astype(str) + + return skew_df diff --git a/openbb_platform/core/openbb_core/py.typed b/openbb_platform/core/openbb_core/py.typed new file mode 100644 index 0000000000000000000000000000000000000000..e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 diff --git a/openbb_platform/core/poetry.lock b/openbb_platform/core/poetry.lock new file mode 100644 index 0000000000000000000000000000000000000000..b4e8975995214d9ea43fe2cdf32e33f8f594037d --- /dev/null +++ b/openbb_platform/core/poetry.lock @@ -0,0 +1,1685 @@ +# This file is automatically @generated by Poetry 2.1.2 and should not be changed by hand. 
+ +[[package]] +name = "aiohappyeyeballs" +version = "2.6.1" +description = "Happy Eyeballs for asyncio" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohappyeyeballs-2.6.1-py3-none-any.whl", hash = "sha256:f349ba8f4b75cb25c99c5c2d84e997e485204d2902a9597802b0371f09331fb8"}, + {file = "aiohappyeyeballs-2.6.1.tar.gz", hash = "sha256:c3f9d0113123803ccadfdf3f0faa505bc78e6a72d1cc4806cbd719826e943558"}, +] + +[[package]] +name = "aiohttp" +version = "3.11.18" +description = "Async http client/server framework (asyncio)" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiohttp-3.11.18-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:96264854fedbea933a9ca4b7e0c745728f01380691687b7365d18d9e977179c4"}, + {file = "aiohttp-3.11.18-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:9602044ff047043430452bc3a2089743fa85da829e6fc9ee0025351d66c332b6"}, + {file = "aiohttp-3.11.18-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5691dc38750fcb96a33ceef89642f139aa315c8a193bbd42a0c33476fd4a1609"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:554c918ec43f8480b47a5ca758e10e793bd7410b83701676a4782672d670da55"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a4076a2b3ba5b004b8cffca6afe18a3b2c5c9ef679b4d1e9859cf76295f8d4f"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:767a97e6900edd11c762be96d82d13a1d7c4fc4b329f054e88b57cdc21fded94"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f0ddc9337a0fb0e727785ad4f41163cc314376e82b31846d3835673786420ef1"}, + {file = "aiohttp-3.11.18-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f414f37b244f2a97e79b98d48c5ff0789a0b4b4609b17d64fa81771ad780e415"}, + {file = 
"aiohttp-3.11.18-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fdb239f47328581e2ec7744ab5911f97afb10752332a6dd3d98e14e429e1a9e7"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:f2c50bad73ed629cc326cc0f75aed8ecfb013f88c5af116f33df556ed47143eb"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a8d8f20c39d3fa84d1c28cdb97f3111387e48209e224408e75f29c6f8e0861d"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:106032eaf9e62fd6bc6578c8b9e6dc4f5ed9a5c1c7fb2231010a1b4304393421"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:b491e42183e8fcc9901d8dcd8ae644ff785590f1727f76ca86e731c61bfe6643"}, + {file = "aiohttp-3.11.18-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ad8c745ff9460a16b710e58e06a9dec11ebc0d8f4dd82091cefb579844d69868"}, + {file = "aiohttp-3.11.18-cp310-cp310-win32.whl", hash = "sha256:8e57da93e24303a883146510a434f0faf2f1e7e659f3041abc4e3fb3f6702a9f"}, + {file = "aiohttp-3.11.18-cp310-cp310-win_amd64.whl", hash = "sha256:cc93a4121d87d9f12739fc8fab0a95f78444e571ed63e40bfc78cd5abe700ac9"}, + {file = "aiohttp-3.11.18-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:427fdc56ccb6901ff8088544bde47084845ea81591deb16f957897f0f0ba1be9"}, + {file = "aiohttp-3.11.18-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2c828b6d23b984255b85b9b04a5b963a74278b7356a7de84fda5e3b76866597b"}, + {file = "aiohttp-3.11.18-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5c2eaa145bb36b33af1ff2860820ba0589e165be4ab63a49aebfd0981c173b66"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d518ce32179f7e2096bf4e3e8438cf445f05fedd597f252de9f54c728574756"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:0700055a6e05c2f4711011a44364020d7a10fbbcd02fbf3e30e8f7e7fddc8717"}, + {file = 
"aiohttp-3.11.18-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8bd1cde83e4684324e6ee19adfc25fd649d04078179890be7b29f76b501de8e4"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73b8870fe1c9a201b8c0d12c94fe781b918664766728783241a79e0468427e4f"}, + {file = "aiohttp-3.11.18-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:25557982dd36b9e32c0a3357f30804e80790ec2c4d20ac6bcc598533e04c6361"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7e889c9df381a2433802991288a61e5a19ceb4f61bd14f5c9fa165655dcb1fd1"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:9ea345fda05bae217b6cce2acf3682ce3b13d0d16dd47d0de7080e5e21362421"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9f26545b9940c4b46f0a9388fd04ee3ad7064c4017b5a334dd450f616396590e"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:3a621d85e85dccabd700294494d7179ed1590b6d07a35709bb9bd608c7f5dd1d"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:9c23fd8d08eb9c2af3faeedc8c56e134acdaf36e2117ee059d7defa655130e5f"}, + {file = "aiohttp-3.11.18-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9e6b0e519067caa4fd7fb72e3e8002d16a68e84e62e7291092a5433763dc0dd"}, + {file = "aiohttp-3.11.18-cp311-cp311-win32.whl", hash = "sha256:122f3e739f6607e5e4c6a2f8562a6f476192a682a52bda8b4c6d4254e1138f4d"}, + {file = "aiohttp-3.11.18-cp311-cp311-win_amd64.whl", hash = "sha256:e6f3c0a3a1e73e88af384b2e8a0b9f4fb73245afd47589df2afcab6b638fa0e6"}, + {file = "aiohttp-3.11.18-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:63d71eceb9cad35d47d71f78edac41fcd01ff10cacaa64e473d1aec13fa02df2"}, + {file = "aiohttp-3.11.18-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d1929da615840969929e8878d7951b31afe0bac883d84418f92e5755d7b49508"}, + 
{file = "aiohttp-3.11.18-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d0aebeb2392f19b184e3fdd9e651b0e39cd0f195cdb93328bd124a1d455cd0e"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3849ead845e8444f7331c284132ab314b4dac43bfae1e3cf350906d4fff4620f"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5e8452ad6b2863709f8b3d615955aa0807bc093c34b8e25b3b52097fe421cb7f"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b8d2b42073611c860a37f718b3d61ae8b4c2b124b2e776e2c10619d920350ec"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:40fbf91f6a0ac317c0a07eb328a1384941872f6761f2e6f7208b63c4cc0a7ff6"}, + {file = "aiohttp-3.11.18-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ff5625413fec55216da5eaa011cf6b0a2ed67a565914a212a51aa3755b0009"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:7f33a92a2fde08e8c6b0c61815521324fc1612f397abf96eed86b8e31618fdb4"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:11d5391946605f445ddafda5eab11caf310f90cdda1fd99865564e3164f5cff9"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3cc314245deb311364884e44242e00c18b5896e4fe6d5f942e7ad7e4cb640adb"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:0f421843b0f70740772228b9e8093289924359d306530bcd3926f39acbe1adda"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:e220e7562467dc8d589e31c1acd13438d82c03d7f385c9cd41a3f6d1d15807c1"}, + {file = "aiohttp-3.11.18-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ab2ef72f8605046115bc9aa8e9d14fd49086d405855f40b79ed9e5c1f9f4faea"}, + {file = "aiohttp-3.11.18-cp312-cp312-win32.whl", hash = 
"sha256:12a62691eb5aac58d65200c7ae94d73e8a65c331c3a86a2e9670927e94339ee8"}, + {file = "aiohttp-3.11.18-cp312-cp312-win_amd64.whl", hash = "sha256:364329f319c499128fd5cd2d1c31c44f234c58f9b96cc57f743d16ec4f3238c8"}, + {file = "aiohttp-3.11.18-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:474215ec618974054cf5dc465497ae9708543cbfc312c65212325d4212525811"}, + {file = "aiohttp-3.11.18-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:6ced70adf03920d4e67c373fd692123e34d3ac81dfa1c27e45904a628567d804"}, + {file = "aiohttp-3.11.18-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2d9f6c0152f8d71361905aaf9ed979259537981f47ad099c8b3d81e0319814bd"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a35197013ed929c0aed5c9096de1fc5a9d336914d73ab3f9df14741668c0616c"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:540b8a1f3a424f1af63e0af2d2853a759242a1769f9f1ab053996a392bd70118"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f9e6710ebebfce2ba21cee6d91e7452d1125100f41b906fb5af3da8c78b764c1"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f8af2ef3b4b652ff109f98087242e2ab974b2b2b496304063585e3d78de0b000"}, + {file = "aiohttp-3.11.18-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:28c3f975e5ae3dbcbe95b7e3dcd30e51da561a0a0f2cfbcdea30fc1308d72137"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c28875e316c7b4c3e745172d882d8a5c835b11018e33432d281211af35794a93"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:13cd38515568ae230e1ef6919e2e33da5d0f46862943fcda74e7e915096815f3"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:0e2a92101efb9f4c2942252c69c63ddb26d20f46f540c239ccfa5af865197bb8"}, + 
{file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e6d3e32b8753c8d45ac550b11a1090dd66d110d4ef805ffe60fa61495360b3b2"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:ea4cf2488156e0f281f93cc2fd365025efcba3e2d217cbe3df2840f8c73db261"}, + {file = "aiohttp-3.11.18-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d4df95ad522c53f2b9ebc07f12ccd2cb15550941e11a5bbc5ddca2ca56316d7"}, + {file = "aiohttp-3.11.18-cp313-cp313-win32.whl", hash = "sha256:cdd1bbaf1e61f0d94aced116d6e95fe25942f7a5f42382195fd9501089db5d78"}, + {file = "aiohttp-3.11.18-cp313-cp313-win_amd64.whl", hash = "sha256:bdd619c27e44382cf642223f11cfd4d795161362a5a1fc1fa3940397bc89db01"}, + {file = "aiohttp-3.11.18-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:469ac32375d9a716da49817cd26f1916ec787fc82b151c1c832f58420e6d3533"}, + {file = "aiohttp-3.11.18-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3cec21dd68924179258ae14af9f5418c1ebdbba60b98c667815891293902e5e0"}, + {file = "aiohttp-3.11.18-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b426495fb9140e75719b3ae70a5e8dd3a79def0ae3c6c27e012fc59f16544a4a"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ad2f41203e2808616292db5d7170cccf0c9f9c982d02544443c7eb0296e8b0c7"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5bc0ae0a5e9939e423e065a3e5b00b24b8379f1db46046d7ab71753dfc7dd0e1"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:fe7cdd3f7d1df43200e1c80f1aed86bb36033bf65e3c7cf46a2b97a253ef8798"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5199be2a2f01ffdfa8c3a6f5981205242986b9e63eb8ae03fd18f736e4840721"}, + {file = "aiohttp-3.11.18-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:7ccec9e72660b10f8e283e91aa0295975c7bd85c204011d9f5eb69310555cf30"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1596ebf17e42e293cbacc7a24c3e0dc0f8f755b40aff0402cb74c1ff6baec1d3"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:eab7b040a8a873020113ba814b7db7fa935235e4cbaf8f3da17671baa1024863"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:5d61df4a05476ff891cff0030329fee4088d40e4dc9b013fac01bc3c745542c2"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:46533e6792e1410f9801d09fd40cbbff3f3518d1b501d6c3c5b218f427f6ff08"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c1b90407ced992331dd6d4f1355819ea1c274cc1ee4d5b7046c6761f9ec11829"}, + {file = "aiohttp-3.11.18-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a2fd04ae4971b914e54fe459dd7edbbd3f2ba875d69e057d5e3c8e8cac094935"}, + {file = "aiohttp-3.11.18-cp39-cp39-win32.whl", hash = "sha256:b2f317d1678002eee6fe85670039fb34a757972284614638f82b903a03feacdc"}, + {file = "aiohttp-3.11.18-cp39-cp39-win_amd64.whl", hash = "sha256:5e7007b8d1d09bce37b54111f593d173691c530b80f27c6493b928dabed9e6ef"}, + {file = "aiohttp-3.11.18.tar.gz", hash = "sha256:ae856e1138612b7e412db63b7708735cff4d38d0399f6a5435d3dac2669f558a"}, +] + +[package.dependencies] +aiohappyeyeballs = ">=2.3.0" +aiosignal = ">=1.1.2" +async-timeout = {version = ">=4.0,<6.0", markers = "python_version < \"3.11\""} +attrs = ">=17.3.0" +frozenlist = ">=1.1.1" +multidict = ">=4.5,<7.0" +propcache = ">=0.2.0" +yarl = ">=1.17.0,<2.0" + +[package.extras] +speedups = ["Brotli ; platform_python_implementation == \"CPython\"", "aiodns (>=3.2.0) ; sys_platform == \"linux\" or sys_platform == \"darwin\"", "brotlicffi ; platform_python_implementation != \"CPython\""] + +[[package]] +name = "aiosignal" +version = "1.3.2" +description = "aiosignal: a list of registered asynchronous callbacks" +optional = false 
+python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "aiosignal-1.3.2-py2.py3-none-any.whl", hash = "sha256:45cde58e409a301715980c2b01d0c28bdde3770d8290b5eb2173759d9acb31a5"}, + {file = "aiosignal-1.3.2.tar.gz", hash = "sha256:a8c255c66fafb1e499c9351d0bf32ff2d8a0321595ebac3b93713656d2436f54"}, +] + +[package.dependencies] +frozenlist = ">=1.1.0" + +[[package]] +name = "annotated-types" +version = "0.7.0" +description = "Reusable constraint types to use with typing.Annotated" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "annotated_types-0.7.0-py3-none-any.whl", hash = "sha256:1f02e8b43a8fbbc3f3e0d4f0f4bfc8131bcb4eebe8849b8e5c773f3a1c582a53"}, + {file = "annotated_types-0.7.0.tar.gz", hash = "sha256:aff07c09a53a08bc8cfccb9c85b05f1aa9a2a6f23728d790723543408344ce89"}, +] + +[[package]] +name = "anyio" +version = "4.9.0" +description = "High level compatibility layer for multiple asynchronous event loop implementations" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, + {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, +] + +[package.dependencies] +exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} +idna = ">=2.8" +sniffio = ">=1.1" +typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} + +[package.extras] +doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] +test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] 
+trio = ["trio (>=0.26.1)"] + +[[package]] +name = "async-timeout" +version = "5.0.1" +description = "Timeout context manager for asyncio programs" +optional = false +python-versions = ">=3.8" +groups = ["main"] +markers = "python_version < \"3.11\"" +files = [ + {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, + {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, +] + +[[package]] +name = "attrs" +version = "25.3.0" +description = "Classes Without Boilerplate" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3"}, + {file = "attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b"}, +] + +[package.extras] +benchmark = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +cov = ["cloudpickle ; platform_python_implementation == \"CPython\"", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +dev = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pre-commit-uv", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and 
python_version >= \"3.10\"", "pytest-xdist[psutil]"] +docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier"] +tests = ["cloudpickle ; platform_python_implementation == \"CPython\"", "hypothesis", "mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-xdist[psutil]"] +tests-mypy = ["mypy (>=1.11.1) ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\"", "pytest-mypy-plugins ; platform_python_implementation == \"CPython\" and python_version >= \"3.10\""] + +[[package]] +name = "backoff" +version = "2.2.1" +description = "Function decoration for backoff and retry" +optional = false +python-versions = ">=3.7,<4.0" +groups = ["main"] +files = [ + {file = "backoff-2.2.1-py3-none-any.whl", hash = "sha256:63579f9a0628e06278f7e47b7d7d5b6ce20dc65c5e96a6f3ca99a6adca0396e8"}, + {file = "backoff-2.2.1.tar.gz", hash = "sha256:03f829f5bb1923180821643f8753b0502c3b682293992485b0eef2807afa5cba"}, +] + +[[package]] +name = "certifi" +version = "2025.4.26" +description = "Python package for providing Mozilla's CA Bundle." +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "certifi-2025.4.26-py3-none-any.whl", hash = "sha256:30350364dfe371162649852c63336a15c70c6510c2ad5015b21c2345311805f3"}, + {file = "certifi-2025.4.26.tar.gz", hash = "sha256:0a816057ea3cdefcef70270d2c515e4506bbc954f417fa5ade2021213bb8f0c6"}, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.1" +description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, + {file = 
"charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, + {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, + {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, + {file = 
"charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, + {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, + {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", 
hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, + {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash 
= "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, + {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, + {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, + {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, + {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, +] + +[[package]] +name = "click" +version = "8.1.8" +description = "Composable command line interface toolkit" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, + {file = "click-8.1.8.tar.gz", hash = 
"sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, +] + +[package.dependencies] +colorama = {version = "*", markers = "platform_system == \"Windows\""} + +[[package]] +name = "colorama" +version = "0.4.6" +description = "Cross-platform colored terminal text." +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" +groups = ["main"] +markers = "platform_system == \"Windows\"" +files = [ + {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, + {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, +] + +[[package]] +name = "distro" +version = "1.9.0" +description = "Distro - an OS platform information API" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, + {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, +] + +[[package]] +name = "exceptiongroup" +version = "1.2.2" +description = "Backport of PEP 654 (exception groups)" +optional = false +python-versions = ">=3.7" +groups = ["main"] +markers = "python_version < \"3.11\"" +files = [ + {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, + {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, +] + +[package.extras] +test = ["pytest (>=6)"] + +[[package]] +name = "fastapi" +version = "0.115.12" +description = "FastAPI framework, high performance, easy to learn, fast to code, ready for production" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "fastapi-0.115.12-py3-none-any.whl", hash = 
"sha256:e94613d6c05e27be7ffebdd6ea5f388112e5e430c8f7d6494a9d1d88d43e814d"}, + {file = "fastapi-0.115.12.tar.gz", hash = "sha256:1e2c2a2646905f9e83d32f04a3f86aff4a286669c6c950ca95b5fd68c2602681"}, +] + +[package.dependencies] +pydantic = ">=1.7.4,<1.8 || >1.8,<1.8.1 || >1.8.1,<2.0.0 || >2.0.0,<2.0.1 || >2.0.1,<2.1.0 || >2.1.0,<3.0.0" +starlette = ">=0.40.0,<0.47.0" +typing-extensions = ">=4.8.0" + +[package.extras] +all = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "itsdangerous (>=1.1.0)", "jinja2 (>=3.1.5)", "orjson (>=3.2.1)", "pydantic-extra-types (>=2.0.0)", "pydantic-settings (>=2.0.0)", "python-multipart (>=0.0.18)", "pyyaml (>=5.3.1)", "ujson (>=4.0.1,!=4.0.2,!=4.1.0,!=4.2.0,!=4.3.0,!=5.0.0,!=5.1.0)", "uvicorn[standard] (>=0.12.0)"] +standard = ["email-validator (>=2.0.0)", "fastapi-cli[standard] (>=0.0.5)", "httpx (>=0.23.0)", "jinja2 (>=3.1.5)", "python-multipart (>=0.0.18)", "uvicorn[standard] (>=0.12.0)"] + +[[package]] +name = "frozenlist" +version = "1.6.0" +description = "A list-like structure which implements collections.abc.MutableSequence" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "frozenlist-1.6.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:e6e558ea1e47fd6fa8ac9ccdad403e5dd5ecc6ed8dda94343056fa4277d5c65e"}, + {file = "frozenlist-1.6.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:f4b3cd7334a4bbc0c472164f3744562cb72d05002cc6fcf58adb104630bbc352"}, + {file = "frozenlist-1.6.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9799257237d0479736e2b4c01ff26b5c7f7694ac9692a426cb717f3dc02fff9b"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3a7bb0fe1f7a70fb5c6f497dc32619db7d2cdd53164af30ade2f34673f8b1fc"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = 
"sha256:36d2fc099229f1e4237f563b2a3e0ff7ccebc3999f729067ce4e64a97a7f2869"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f27a9f9a86dcf00708be82359db8de86b80d029814e6693259befe82bb58a106"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75ecee69073312951244f11b8627e3700ec2bfe07ed24e3a685a5979f0412d24"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f2c7d5aa19714b1b01a0f515d078a629e445e667b9da869a3cd0e6fe7dec78bd"}, + {file = "frozenlist-1.6.0-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:69bbd454f0fb23b51cadc9bdba616c9678e4114b6f9fa372d462ff2ed9323ec8"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7daa508e75613809c7a57136dec4871a21bca3080b3a8fc347c50b187df4f00c"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:89ffdb799154fd4d7b85c56d5fa9d9ad48946619e0eb95755723fffa11022d75"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:920b6bd77d209931e4c263223381d63f76828bec574440f29eb497cf3394c249"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d3ceb265249fb401702fce3792e6b44c1166b9319737d21495d3611028d95769"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:52021b528f1571f98a7d4258c58aa8d4b1a96d4f01d00d51f1089f2e0323cb02"}, + {file = "frozenlist-1.6.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:0f2ca7810b809ed0f1917293050163c7654cefc57a49f337d5cd9de717b8fad3"}, + {file = "frozenlist-1.6.0-cp310-cp310-win32.whl", hash = "sha256:0e6f8653acb82e15e5443dba415fb62a8732b68fe09936bb6d388c725b57f812"}, + {file = "frozenlist-1.6.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:f1a39819a5a3e84304cd286e3dc62a549fe60985415851b3337b6f5cc91907f1"}, + {file = "frozenlist-1.6.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:ae8337990e7a45683548ffb2fee1af2f1ed08169284cd829cdd9a7fa7470530d"}, + {file = "frozenlist-1.6.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:8c952f69dd524558694818a461855f35d36cc7f5c0adddce37e962c85d06eac0"}, + {file = "frozenlist-1.6.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8f5fef13136c4e2dee91bfb9a44e236fff78fc2cd9f838eddfc470c3d7d90afe"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:716bbba09611b4663ecbb7cd022f640759af8259e12a6ca939c0a6acd49eedba"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7b8c4dc422c1a3ffc550b465090e53b0bf4839047f3e436a34172ac67c45d595"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b11534872256e1666116f6587a1592ef395a98b54476addb5e8d352925cb5d4a"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c6eceb88aaf7221f75be6ab498dc622a151f5f88d536661af3ffc486245a626"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:62c828a5b195570eb4b37369fcbbd58e96c905768d53a44d13044355647838ff"}, + {file = "frozenlist-1.6.0-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e1c6bd2c6399920c9622362ce95a7d74e7f9af9bfec05fff91b8ce4b9647845a"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:49ba23817781e22fcbd45fd9ff2b9b8cdb7b16a42a4851ab8025cae7b22e96d0"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:431ef6937ae0f853143e2ca67d6da76c083e8b1fe3df0e96f3802fd37626e606"}, + {file = 
"frozenlist-1.6.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9d124b38b3c299ca68433597ee26b7819209cb8a3a9ea761dfe9db3a04bba584"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:118e97556306402e2b010da1ef21ea70cb6d6122e580da64c056b96f524fbd6a"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fb3b309f1d4086b5533cf7bbcf3f956f0ae6469664522f1bde4feed26fba60f1"}, + {file = "frozenlist-1.6.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54dece0d21dce4fdb188a1ffc555926adf1d1c516e493c2914d7c370e454bc9e"}, + {file = "frozenlist-1.6.0-cp311-cp311-win32.whl", hash = "sha256:654e4ba1d0b2154ca2f096bed27461cf6160bc7f504a7f9a9ef447c293caf860"}, + {file = "frozenlist-1.6.0-cp311-cp311-win_amd64.whl", hash = "sha256:3e911391bffdb806001002c1f860787542f45916c3baf764264a52765d5a5603"}, + {file = "frozenlist-1.6.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:c5b9e42ace7d95bf41e19b87cec8f262c41d3510d8ad7514ab3862ea2197bfb1"}, + {file = "frozenlist-1.6.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ca9973735ce9f770d24d5484dcb42f68f135351c2fc81a7a9369e48cf2998a29"}, + {file = "frozenlist-1.6.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6ac40ec76041c67b928ca8aaffba15c2b2ee3f5ae8d0cb0617b5e63ec119ca25"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95b7a8a3180dfb280eb044fdec562f9b461614c0ef21669aea6f1d3dac6ee576"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:c444d824e22da6c9291886d80c7d00c444981a72686e2b59d38b285617cb52c8"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb52c8166499a8150bfd38478248572c924c003cbb45fe3bcd348e5ac7c000f9"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:b35298b2db9c2468106278537ee529719228950a5fdda686582f68f247d1dc6e"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d108e2d070034f9d57210f22fefd22ea0d04609fc97c5f7f5a686b3471028590"}, + {file = "frozenlist-1.6.0-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e1be9111cb6756868ac242b3c2bd1f09d9aea09846e4f5c23715e7afb647103"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:94bb451c664415f02f07eef4ece976a2c65dcbab9c2f1705b7031a3a75349d8c"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:d1a686d0b0949182b8faddea596f3fc11f44768d1f74d4cad70213b2e139d821"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:ea8e59105d802c5a38bdbe7362822c522230b3faba2aa35c0fa1765239b7dd70"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:abc4e880a9b920bc5020bf6a431a6bb40589d9bca3975c980495f63632e8382f"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9a79713adfe28830f27a3c62f6b5406c37376c892b05ae070906f07ae4487046"}, + {file = "frozenlist-1.6.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a0318c2068e217a8f5e3b85e35899f5a19e97141a45bb925bb357cfe1daf770"}, + {file = "frozenlist-1.6.0-cp312-cp312-win32.whl", hash = "sha256:853ac025092a24bb3bf09ae87f9127de9fe6e0c345614ac92536577cf956dfcc"}, + {file = "frozenlist-1.6.0-cp312-cp312-win_amd64.whl", hash = "sha256:2bdfe2d7e6c9281c6e55523acd6c2bf77963cb422fdc7d142fb0cb6621b66878"}, + {file = "frozenlist-1.6.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:1d7fb014fe0fbfee3efd6a94fc635aeaa68e5e1720fe9e57357f2e2c6e1a647e"}, + {file = "frozenlist-1.6.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:01bcaa305a0fdad12745502bfd16a1c75b14558dabae226852f9159364573117"}, + {file = 
"frozenlist-1.6.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8b314faa3051a6d45da196a2c495e922f987dc848e967d8cfeaee8a0328b1cd4"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da62fecac21a3ee10463d153549d8db87549a5e77eefb8c91ac84bb42bb1e4e3"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1eb89bf3454e2132e046f9599fbcf0a4483ed43b40f545551a39316d0201cd1"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d18689b40cb3936acd971f663ccb8e2589c45db5e2c5f07e0ec6207664029a9c"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e67ddb0749ed066b1a03fba812e2dcae791dd50e5da03be50b6a14d0c1a9ee45"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fc5e64626e6682638d6e44398c9baf1d6ce6bc236d40b4b57255c9d3f9761f1f"}, + {file = "frozenlist-1.6.0-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:437cfd39564744ae32ad5929e55b18ebd88817f9180e4cc05e7d53b75f79ce85"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:62dd7df78e74d924952e2feb7357d826af8d2f307557a779d14ddf94d7311be8"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:a66781d7e4cddcbbcfd64de3d41a61d6bdde370fc2e38623f30b2bd539e84a9f"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:482fe06e9a3fffbcd41950f9d890034b4a54395c60b5e61fae875d37a699813f"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:e4f9373c500dfc02feea39f7a56e4f543e670212102cc2eeb51d3a99c7ffbde6"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = 
"sha256:e69bb81de06827147b7bfbaeb284d85219fa92d9f097e32cc73675f279d70188"}, + {file = "frozenlist-1.6.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:7613d9977d2ab4a9141dde4a149f4357e4065949674c5649f920fec86ecb393e"}, + {file = "frozenlist-1.6.0-cp313-cp313-win32.whl", hash = "sha256:4def87ef6d90429f777c9d9de3961679abf938cb6b7b63d4a7eb8a268babfce4"}, + {file = "frozenlist-1.6.0-cp313-cp313-win_amd64.whl", hash = "sha256:37a8a52c3dfff01515e9bbbee0e6063181362f9de3db2ccf9bc96189b557cbfd"}, + {file = "frozenlist-1.6.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:46138f5a0773d064ff663d273b309b696293d7a7c00a0994c5c13a5078134b64"}, + {file = "frozenlist-1.6.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f88bc0a2b9c2a835cb888b32246c27cdab5740059fb3688852bf91e915399b91"}, + {file = "frozenlist-1.6.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:777704c1d7655b802c7850255639672e90e81ad6fa42b99ce5ed3fbf45e338dd"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:85ef8d41764c7de0dcdaf64f733a27352248493a85a80661f3c678acd27e31f2"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:da5cb36623f2b846fb25009d9d9215322318ff1c63403075f812b3b2876c8506"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:cbb56587a16cf0fb8acd19e90ff9924979ac1431baea8681712716a8337577b0"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c6154c3ba59cda3f954c6333025369e42c3acd0c6e8b6ce31eb5c5b8116c07e0"}, + {file = "frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2e8246877afa3f1ae5c979fe85f567d220f86a50dc6c493b9b7d8191181ae01e"}, + {file = 
"frozenlist-1.6.0-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7b0f6cce16306d2e117cf9db71ab3a9e8878a28176aeaf0dbe35248d97b28d0c"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1b8e8cd8032ba266f91136d7105706ad57770f3522eac4a111d77ac126a25a9b"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:e2ada1d8515d3ea5378c018a5f6d14b4994d4036591a52ceaf1a1549dec8e1ad"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:cdb2c7f071e4026c19a3e32b93a09e59b12000751fc9b0b7758da899e657d215"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:03572933a1969a6d6ab509d509e5af82ef80d4a5d4e1e9f2e1cdd22c77a3f4d2"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:77effc978947548b676c54bbd6a08992759ea6f410d4987d69feea9cd0919911"}, + {file = "frozenlist-1.6.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:a2bda8be77660ad4089caf2223fdbd6db1858462c4b85b67fbfa22102021e497"}, + {file = "frozenlist-1.6.0-cp313-cp313t-win32.whl", hash = "sha256:a4d96dc5bcdbd834ec6b0f91027817214216b5b30316494d2b1aebffb87c534f"}, + {file = "frozenlist-1.6.0-cp313-cp313t-win_amd64.whl", hash = "sha256:e18036cb4caa17ea151fd5f3d70be9d354c99eb8cf817a3ccde8a7873b074348"}, + {file = "frozenlist-1.6.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:536a1236065c29980c15c7229fbb830dedf809708c10e159b8136534233545f0"}, + {file = "frozenlist-1.6.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ed5e3a4462ff25ca84fb09e0fada8ea267df98a450340ead4c91b44857267d70"}, + {file = "frozenlist-1.6.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e19c0fc9f4f030fcae43b4cdec9e8ab83ffe30ec10c79a4a43a04d1af6c5e1ad"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c7c608f833897501dac548585312d73a7dca028bf3b8688f0d712b7acfaf7fb3"}, + 
{file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:0dbae96c225d584f834b8d3cc688825911960f003a85cb0fd20b6e5512468c42"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:625170a91dd7261a1d1c2a0c1a353c9e55d21cd67d0852185a5fef86587e6f5f"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1db8b2fc7ee8a940b547a14c10e56560ad3ea6499dc6875c354e2335812f739d"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4da6fc43048b648275a220e3a61c33b7fff65d11bdd6dcb9d9c145ff708b804c"}, + {file = "frozenlist-1.6.0-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef8e7e8f2f3820c5f175d70fdd199b79e417acf6c72c5d0aa8f63c9f721646f"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:aa733d123cc78245e9bb15f29b44ed9e5780dc6867cfc4e544717b91f980af3b"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:ba7f8d97152b61f22d7f59491a781ba9b177dd9f318486c5fbc52cde2db12189"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:56a0b8dd6d0d3d971c91f1df75e824986667ccce91e20dca2023683814344791"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:5c9e89bf19ca148efcc9e3c44fd4c09d5af85c8a7dd3dbd0da1cb83425ef4983"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:1330f0a4376587face7637dfd245380a57fe21ae8f9d360c1c2ef8746c4195fa"}, + {file = "frozenlist-1.6.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2187248203b59625566cac53572ec8c2647a140ee2738b4e36772930377a533c"}, + {file = "frozenlist-1.6.0-cp39-cp39-win32.whl", hash = "sha256:2b8cf4cfea847d6c12af06091561a89740f1f67f331c3fa8623391905e878530"}, + {file = 
"frozenlist-1.6.0-cp39-cp39-win_amd64.whl", hash = "sha256:1255d5d64328c5a0d066ecb0f02034d086537925f1f04b50b1ae60d37afbf572"}, + {file = "frozenlist-1.6.0-py3-none-any.whl", hash = "sha256:535eec9987adb04701266b92745d6cdcef2e77669299359c3009c3404dd5d191"}, + {file = "frozenlist-1.6.0.tar.gz", hash = "sha256:b99655c32c1c8e06d111e7f41c06c29a5318cb1835df23a45518e02a47c63b68"}, +] + +[[package]] +name = "h11" +version = "0.16.0" +description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "h11-0.16.0-py3-none-any.whl", hash = "sha256:63cf8bbe7522de3bf65932fda1d9c2772064ffb3dae62d55932da54b31cb6c86"}, + {file = "h11-0.16.0.tar.gz", hash = "sha256:4e35b956cf45792e4caa5885e69fba00bdbc6ffafbfa020300e549b208ee5ff1"}, +] + +[[package]] +name = "html5lib" +version = "1.1" +description = "HTML parser based on the WHATWG HTML specification" +optional = false +python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" +groups = ["main"] +files = [ + {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, + {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, +] + +[package.dependencies] +six = ">=1.9" +webencodings = "*" + +[package.extras] +all = ["chardet (>=2.2)", "genshi", "lxml ; platform_python_implementation == \"CPython\""] +chardet = ["chardet (>=2.2)"] +genshi = ["genshi"] +lxml = ["lxml ; platform_python_implementation == \"CPython\""] + +[[package]] +name = "idna" +version = "3.10" +description = "Internationalized Domain Names in Applications (IDNA)" +optional = false +python-versions = ">=3.6" +groups = ["main"] +files = [ + {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, + {file = "idna-3.10.tar.gz", hash = 
"sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, +] + +[package.extras] +all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] + +[[package]] +name = "importlib-metadata" +version = "8.7.0" +description = "Read metadata from Python packages" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd"}, + {file = "importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000"}, +] + +[package.dependencies] +zipp = ">=3.20" + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover = ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +perf = ["ipython"] +test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] +type = ["pytest-mypy"] + +[[package]] +name = "monotonic" +version = "1.6" +description = "An implementation of time.monotonic() for Python 2 & < 3.3" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "monotonic-1.6-py2.py3-none-any.whl", hash = "sha256:68687e19a14f11f26d140dd5c86f3dba4bf5df58003000ed467e0e2a69bca96c"}, + {file = "monotonic-1.6.tar.gz", hash = "sha256:3a55207bcfed53ddd5c5bae174524062935efed17792e9de2ad0205ce9ad63f7"}, +] + +[[package]] +name = "multidict" +version = "6.4.3" +description = "multidict implementation" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "multidict-6.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:32a998bd8a64ca48616eac5a8c1cc4fa38fb244a3facf2eeb14abe186e0f6cc5"}, + {file = 
"multidict-6.4.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a54ec568f1fc7f3c313c2f3b16e5db346bf3660e1309746e7fccbbfded856188"}, + {file = "multidict-6.4.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a7be07e5df178430621c716a63151165684d3e9958f2bbfcb644246162007ab7"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b128dbf1c939674a50dd0b28f12c244d90e5015e751a4f339a96c54f7275e291"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:b9cb19dfd83d35b6ff24a4022376ea6e45a2beba8ef3f0836b8a4b288b6ad685"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3cf62f8e447ea2c1395afa289b332e49e13d07435369b6f4e41f887db65b40bf"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:909f7d43ff8f13d1adccb6a397094adc369d4da794407f8dd592c51cf0eae4b1"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0bb8f8302fbc7122033df959e25777b0b7659b1fd6bcb9cb6bed76b5de67afef"}, + {file = "multidict-6.4.3-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:224b79471b4f21169ea25ebc37ed6f058040c578e50ade532e2066562597b8a9"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:a7bd27f7ab3204f16967a6f899b3e8e9eb3362c0ab91f2ee659e0345445e0078"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:99592bd3162e9c664671fd14e578a33bfdba487ea64bcb41d281286d3c870ad7"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a62d78a1c9072949018cdb05d3c533924ef8ac9bcb06cbf96f6d14772c5cd451"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:3ccdde001578347e877ca4f629450973c510e88e8865d5aefbcb89b852ccc666"}, + {file = 
"multidict-6.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:eccb67b0e78aa2e38a04c5ecc13bab325a43e5159a181a9d1a6723db913cbb3c"}, + {file = "multidict-6.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8b6fcf6054fc4114a27aa865f8840ef3d675f9316e81868e0ad5866184a6cba5"}, + {file = "multidict-6.4.3-cp310-cp310-win32.whl", hash = "sha256:f92c7f62d59373cd93bc9969d2da9b4b21f78283b1379ba012f7ee8127b3152e"}, + {file = "multidict-6.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:b57e28dbc031d13916b946719f213c494a517b442d7b48b29443e79610acd887"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:f6f19170197cc29baccd33ccc5b5d6a331058796485857cf34f7635aa25fb0cd"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:f2882bf27037eb687e49591690e5d491e677272964f9ec7bc2abbe09108bdfb8"}, + {file = "multidict-6.4.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:fbf226ac85f7d6b6b9ba77db4ec0704fde88463dc17717aec78ec3c8546c70ad"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2e329114f82ad4b9dd291bef614ea8971ec119ecd0f54795109976de75c9a852"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:1f4e0334d7a555c63f5c8952c57ab6f1c7b4f8c7f3442df689fc9f03df315c08"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:740915eb776617b57142ce0bb13b7596933496e2f798d3d15a20614adf30d229"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:255dac25134d2b141c944b59a0d2f7211ca12a6d4779f7586a98b4b03ea80508"}, + {file = "multidict-6.4.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d4e8535bd4d741039b5aad4285ecd9b902ef9e224711f0b6afda6e38d7ac02c7"}, + {file = 
"multidict-6.4.3-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:30c433a33be000dd968f5750722eaa0991037be0be4a9d453eba121774985bc8"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4eb33b0bdc50acd538f45041f5f19945a1f32b909b76d7b117c0c25d8063df56"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:75482f43465edefd8a5d72724887ccdcd0c83778ded8f0cb1e0594bf71736cc0"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ce5b3082e86aee80b3925ab4928198450d8e5b6466e11501fe03ad2191c6d777"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e413152e3212c4d39f82cf83c6f91be44bec9ddea950ce17af87fbf4e32ca6b2"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:8aac2eeff69b71f229a405c0a4b61b54bade8e10163bc7b44fcd257949620618"}, + {file = "multidict-6.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ab583ac203af1d09034be41458feeab7863c0635c650a16f15771e1386abf2d7"}, + {file = "multidict-6.4.3-cp311-cp311-win32.whl", hash = "sha256:1b2019317726f41e81154df636a897de1bfe9228c3724a433894e44cd2512378"}, + {file = "multidict-6.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:43173924fa93c7486402217fab99b60baf78d33806af299c56133a3755f69589"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:1f1c2f58f08b36f8475f3ec6f5aeb95270921d418bf18f90dffd6be5c7b0e676"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:26ae9ad364fc61b936fb7bf4c9d8bd53f3a5b4417142cd0be5c509d6f767e2f1"}, + {file = "multidict-6.4.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:659318c6c8a85f6ecfc06b4e57529e5a78dfdd697260cc81f683492ad7e9435a"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e1eb72c741fd24d5a28242ce72bb61bc91f8451877131fa3fe930edb195f7054"}, + {file = 
"multidict-6.4.3-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3cd06d88cb7398252284ee75c8db8e680aa0d321451132d0dba12bc995f0adcc"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:4543d8dc6470a82fde92b035a92529317191ce993533c3c0c68f56811164ed07"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:30a3ebdc068c27e9d6081fca0e2c33fdf132ecea703a72ea216b81a66860adde"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b038f10e23f277153f86f95c777ba1958bcd5993194fda26a1d06fae98b2f00c"}, + {file = "multidict-6.4.3-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c605a2b2dc14282b580454b9b5d14ebe0668381a3a26d0ac39daa0ca115eb2ae"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8bd2b875f4ca2bb527fe23e318ddd509b7df163407b0fb717df229041c6df5d3"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:c2e98c840c9c8e65c0e04b40c6c5066c8632678cd50c8721fdbcd2e09f21a507"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:66eb80dd0ab36dbd559635e62fba3083a48a252633164857a1d1684f14326427"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c23831bdee0a2a3cf21be057b5e5326292f60472fb6c6f86392bbf0de70ba731"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:1535cec6443bfd80d028052e9d17ba6ff8a5a3534c51d285ba56c18af97e9713"}, + {file = "multidict-6.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3b73e7227681f85d19dec46e5b881827cd354aabe46049e1a61d2f9aaa4e285a"}, + {file = "multidict-6.4.3-cp312-cp312-win32.whl", hash = "sha256:8eac0c49df91b88bf91f818e0a24c1c46f3622978e2c27035bfdca98e0e18124"}, + {file = "multidict-6.4.3-cp312-cp312-win_amd64.whl", hash = 
"sha256:11990b5c757d956cd1db7cb140be50a63216af32cd6506329c2c59d732d802db"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:7a76534263d03ae0cfa721fea40fd2b5b9d17a6f85e98025931d41dc49504474"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:805031c2f599eee62ac579843555ed1ce389ae00c7e9f74c2a1b45e0564a88dd"}, + {file = "multidict-6.4.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c56c179839d5dcf51d565132185409d1d5dd8e614ba501eb79023a6cab25576b"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c64f4ddb3886dd8ab71b68a7431ad4aa01a8fa5be5b11543b29674f29ca0ba3"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:3002a856367c0b41cad6784f5b8d3ab008eda194ed7864aaa58f65312e2abcac"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3d75e621e7d887d539d6e1d789f0c64271c250276c333480a9e1de089611f790"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:995015cf4a3c0d72cbf453b10a999b92c5629eaf3a0c3e1efb4b5c1f602253bb"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a2b0fabae7939d09d7d16a711468c385272fa1b9b7fb0d37e51143585d8e72e0"}, + {file = "multidict-6.4.3-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61ed4d82f8a1e67eb9eb04f8587970d78fe7cddb4e4d6230b77eda23d27938f9"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:062428944a8dc69df9fdc5d5fc6279421e5f9c75a9ee3f586f274ba7b05ab3c8"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:b90e27b4674e6c405ad6c64e515a505c6d113b832df52fdacb6b1ffd1fa9a1d1"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:7d50d4abf6729921e9613d98344b74241572b751c6b37feed75fb0c37bd5a817"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:43fe10524fb0a0514be3954be53258e61d87341008ce4914f8e8b92bee6f875d"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:236966ca6c472ea4e2d3f02f6673ebfd36ba3f23159c323f5a496869bc8e47c9"}, + {file = "multidict-6.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:422a5ec315018e606473ba1f5431e064cf8b2a7468019233dcf8082fabad64c8"}, + {file = "multidict-6.4.3-cp313-cp313-win32.whl", hash = "sha256:f901a5aace8e8c25d78960dcc24c870c8d356660d3b49b93a78bf38eb682aac3"}, + {file = "multidict-6.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:1c152c49e42277bc9a2f7b78bd5fa10b13e88d1b0328221e7aef89d5c60a99a5"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:be8751869e28b9c0d368d94f5afcb4234db66fe8496144547b4b6d6a0645cfc6"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0d4b31f8a68dccbcd2c0ea04f0e014f1defc6b78f0eb8b35f2265e8716a6df0c"}, + {file = "multidict-6.4.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:032efeab3049e37eef2ff91271884303becc9e54d740b492a93b7e7266e23756"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9e78006af1a7c8a8007e4f56629d7252668344442f66982368ac06522445e375"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:daeac9dd30cda8703c417e4fddccd7c4dc0c73421a0b54a7da2713be125846be"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1f6f90700881438953eae443a9c6f8a509808bc3b185246992c4233ccee37fea"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f84627997008390dd15762128dcf73c3365f4ec0106739cde6c20a07ed198ec8"}, + {file = 
"multidict-6.4.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3307b48cd156153b117c0ea54890a3bdbf858a5b296ddd40dc3852e5f16e9b02"}, + {file = "multidict-6.4.3-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ead46b0fa1dcf5af503a46e9f1c2e80b5d95c6011526352fa5f42ea201526124"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1748cb2743bedc339d63eb1bca314061568793acd603a6e37b09a326334c9f44"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:acc9fa606f76fc111b4569348cc23a771cb52c61516dcc6bcef46d612edb483b"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:31469d5832b5885adeb70982e531ce86f8c992334edd2f2254a10fa3182ac504"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:ba46b51b6e51b4ef7bfb84b82f5db0dc5e300fb222a8a13b8cd4111898a869cf"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:389cfefb599edf3fcfd5f64c0410da686f90f5f5e2c4d84e14f6797a5a337af4"}, + {file = "multidict-6.4.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:64bc2bbc5fba7b9db5c2c8d750824f41c6994e3882e6d73c903c2afa78d091e4"}, + {file = "multidict-6.4.3-cp313-cp313t-win32.whl", hash = "sha256:0ecdc12ea44bab2807d6b4a7e5eef25109ab1c82a8240d86d3c1fc9f3b72efd5"}, + {file = "multidict-6.4.3-cp313-cp313t-win_amd64.whl", hash = "sha256:7146a8742ea71b5d7d955bffcef58a9e6e04efba704b52a460134fefd10a8208"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5427a2679e95a642b7f8b0f761e660c845c8e6fe3141cddd6b62005bd133fc21"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:24a8caa26521b9ad09732972927d7b45b66453e6ebd91a3c6a46d811eeb7349b"}, + {file = "multidict-6.4.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6b5a272bc7c36a2cd1b56ddc6bff02e9ce499f9f14ee4a45c45434ef083f2459"}, + {file = 
"multidict-6.4.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:edf74dc5e212b8c75165b435c43eb0d5e81b6b300a938a4eb82827119115e840"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:9f35de41aec4b323c71f54b0ca461ebf694fb48bec62f65221f52e0017955b39"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae93e0ff43b6f6892999af64097b18561691ffd835e21a8348a441e256592e1f"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5e3929269e9d7eff905d6971d8b8c85e7dbc72c18fb99c8eae6fe0a152f2e343"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fb6214fe1750adc2a1b801a199d64b5a67671bf76ebf24c730b157846d0e90d2"}, + {file = "multidict-6.4.3-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d79cf5c0c6284e90f72123f4a3e4add52d6c6ebb4a9054e88df15b8d08444c6"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2427370f4a255262928cd14533a70d9738dfacadb7563bc3b7f704cc2360fc4e"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:fbd8d737867912b6c5f99f56782b8cb81f978a97b4437a1c476de90a3e41c9a1"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0ee1bf613c448997f73fc4efb4ecebebb1c02268028dd4f11f011f02300cf1e8"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:578568c4ba5f2b8abd956baf8b23790dbfdc953e87d5b110bce343b4a54fc9e7"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:a059ad6b80de5b84b9fa02a39400319e62edd39d210b4e4f8c4f1243bdac4752"}, + {file = "multidict-6.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:dd53893675b729a965088aaadd6a1f326a72b83742b056c1065bdd2e2a42b4df"}, + {file = "multidict-6.4.3-cp39-cp39-win32.whl", hash = 
"sha256:abcfed2c4c139f25c2355e180bcc077a7cae91eefbb8b3927bb3f836c9586f1f"}, + {file = "multidict-6.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:b1b389ae17296dd739015d5ddb222ee99fd66adeae910de21ac950e00979d897"}, + {file = "multidict-6.4.3-py3-none-any.whl", hash = "sha256:59fe01ee8e2a1e8ceb3f6dbb216b09c8d9f4ef1c22c4fc825d045a147fa2ebc9"}, + {file = "multidict-6.4.3.tar.gz", hash = "sha256:3ada0b058c9f213c5f95ba301f922d402ac234f1111a7d8fd70f1b99f3c281ec"}, +] + +[package.dependencies] +typing-extensions = {version = ">=4.1.0", markers = "python_version < \"3.11\""} + +[[package]] +name = "numpy" +version = "2.0.2" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +markers = "python_version < \"3.11\"" +files = [ + {file = "numpy-2.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:51129a29dbe56f9ca83438b706e2e69a39892b5eda6cedcb6b0c9fdc9b0d3ece"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f15975dfec0cf2239224d80e32c3170b1d168335eaedee69da84fbe9f1f9cd04"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:8c5713284ce4e282544c68d1c3b2c7161d38c256d2eefc93c1d683cf47683e66"}, + {file = "numpy-2.0.2-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:becfae3ddd30736fe1889a37f1f580e245ba79a5855bff5f2a29cb3ccc22dd7b"}, + {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2da5960c3cf0df7eafefd806d4e612c5e19358de82cb3c343631188991566ccd"}, + {file = "numpy-2.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:496f71341824ed9f3d2fd36cf3ac57ae2e0165c143b55c3a035ee219413f3318"}, + {file = "numpy-2.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:a61ec659f68ae254e4d237816e33171497e978140353c0c2038d46e63282d0c8"}, + {file = "numpy-2.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d731a1c6116ba289c1e9ee714b08a8ff882944d4ad631fd411106a30f083c326"}, 
+ {file = "numpy-2.0.2-cp310-cp310-win32.whl", hash = "sha256:984d96121c9f9616cd33fbd0618b7f08e0cfc9600a7ee1d6fd9b239186d19d97"}, + {file = "numpy-2.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:c7b0be4ef08607dd04da4092faee0b86607f111d5ae68036f16cc787e250a131"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:49ca4decb342d66018b01932139c0961a8f9ddc7589611158cb3c27cbcf76448"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:11a76c372d1d37437857280aa142086476136a8c0f373b2e648ab2c8f18fb195"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:807ec44583fd708a21d4a11d94aedf2f4f3c3719035c76a2bbe1fe8e217bdc57"}, + {file = "numpy-2.0.2-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:8cafab480740e22f8d833acefed5cc87ce276f4ece12fdaa2e8903db2f82897a"}, + {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a15f476a45e6e5a3a79d8a14e62161d27ad897381fecfa4a09ed5322f2085669"}, + {file = "numpy-2.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:13e689d772146140a252c3a28501da66dfecd77490b498b168b501835041f951"}, + {file = "numpy-2.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9ea91dfb7c3d1c56a0e55657c0afb38cf1eeae4544c208dc465c3c9f3a7c09f9"}, + {file = "numpy-2.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c1c9307701fec8f3f7a1e6711f9089c06e6284b3afbbcd259f7791282d660a15"}, + {file = "numpy-2.0.2-cp311-cp311-win32.whl", hash = "sha256:a392a68bd329eafac5817e5aefeb39038c48b671afd242710b451e76090e81f4"}, + {file = "numpy-2.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:286cd40ce2b7d652a6f22efdfc6d1edf879440e53e76a75955bc0c826c7e64dc"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:df55d490dea7934f330006d0f81e8551ba6010a5bf035a249ef61a94f21c500b"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = 
"sha256:8df823f570d9adf0978347d1f926b2a867d5608f434a7cff7f7908c6570dcf5e"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9a92ae5c14811e390f3767053ff54eaee3bf84576d99a2456391401323f4ec2c"}, + {file = "numpy-2.0.2-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:a842d573724391493a97a62ebbb8e731f8a5dcc5d285dfc99141ca15a3302d0c"}, + {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c05e238064fc0610c840d1cf6a13bf63d7e391717d247f1bf0318172e759e692"}, + {file = "numpy-2.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0123ffdaa88fa4ab64835dcbde75dcdf89c453c922f18dced6e27c90d1d0ec5a"}, + {file = "numpy-2.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:96a55f64139912d61de9137f11bf39a55ec8faec288c75a54f93dfd39f7eb40c"}, + {file = "numpy-2.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ec9852fb39354b5a45a80bdab5ac02dd02b15f44b3804e9f00c556bf24b4bded"}, + {file = "numpy-2.0.2-cp312-cp312-win32.whl", hash = "sha256:671bec6496f83202ed2d3c8fdc486a8fc86942f2e69ff0e986140339a63bcbe5"}, + {file = "numpy-2.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:cfd41e13fdc257aa5778496b8caa5e856dc4896d4ccf01841daee1d96465467a"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:9059e10581ce4093f735ed23f3b9d283b9d517ff46009ddd485f1747eb22653c"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:423e89b23490805d2a5a96fe40ec507407b8ee786d66f7328be214f9679df6dd"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_arm64.whl", hash = "sha256:2b2955fa6f11907cf7a70dab0d0755159bca87755e831e47932367fc8f2f2d0b"}, + {file = "numpy-2.0.2-cp39-cp39-macosx_14_0_x86_64.whl", hash = "sha256:97032a27bd9d8988b9a97a8c4d2c9f2c15a81f61e2f21404d7e8ef00cb5be729"}, + {file = "numpy-2.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e795a8be3ddbac43274f18588329c72939870a16cae810c2b73461c40718ab1"}, + {file = 
"numpy-2.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f26b258c385842546006213344c50655ff1555a9338e2e5e02a0756dc3e803dd"}, + {file = "numpy-2.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:5fec9451a7789926bcf7c2b8d187292c9f93ea30284802a0ab3f5be8ab36865d"}, + {file = "numpy-2.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9189427407d88ff25ecf8f12469d4d39d35bee1db5d39fc5c168c6f088a6956d"}, + {file = "numpy-2.0.2-cp39-cp39-win32.whl", hash = "sha256:905d16e0c60200656500c95b6b8dca5d109e23cb24abc701d41c02d74c6b3afa"}, + {file = "numpy-2.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:a3f4ab0caa7f053f6797fcd4e1e25caee367db3112ef2b6ef82d749530768c73"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:7f0a0c6f12e07fa94133c8a67404322845220c06a9e80e85999afe727f7438b8"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-macosx_14_0_x86_64.whl", hash = "sha256:312950fdd060354350ed123c0e25a71327d3711584beaef30cdaa93320c392d4"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:26df23238872200f63518dd2aa984cfca675d82469535dc7162dc2ee52d9dd5c"}, + {file = "numpy-2.0.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:a46288ec55ebbd58947d31d72be2c63cbf839f0a63b49cb755022310792a3385"}, + {file = "numpy-2.0.2.tar.gz", hash = "sha256:883c987dee1880e2a864ab0dc9892292582510604156762362d9326444636e78"}, +] + +[[package]] +name = "numpy" +version = "2.2.5" +description = "Fundamental package for array computing in Python" +optional = false +python-versions = ">=3.10" +groups = ["main"] +markers = "python_version >= \"3.11\"" +files = [ + {file = "numpy-2.2.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1f4a922da1729f4c40932b2af4fe84909c7a6e167e6e99f71838ce3a29f3fe26"}, + {file = "numpy-2.2.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:b6f91524d31b34f4a5fee24f5bc16dcd1491b668798b6d85585d836c1e633a6a"}, + {file = 
"numpy-2.2.5-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:19f4718c9012e3baea91a7dba661dcab2451cda2550678dc30d53acb91a7290f"}, + {file = "numpy-2.2.5-cp310-cp310-macosx_14_0_x86_64.whl", hash = "sha256:eb7fd5b184e5d277afa9ec0ad5e4eb562ecff541e7f60e69ee69c8d59e9aeaba"}, + {file = "numpy-2.2.5-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6413d48a9be53e183eb06495d8e3b006ef8f87c324af68241bbe7a39e8ff54c3"}, + {file = "numpy-2.2.5-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7451f92eddf8503c9b8aa4fe6aa7e87fd51a29c2cfc5f7dbd72efde6c65acf57"}, + {file = "numpy-2.2.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0bcb1d057b7571334139129b7f941588f69ce7c4ed15a9d6162b2ea54ded700c"}, + {file = "numpy-2.2.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:36ab5b23915887543441efd0417e6a3baa08634308894316f446027611b53bf1"}, + {file = "numpy-2.2.5-cp310-cp310-win32.whl", hash = "sha256:422cc684f17bc963da5f59a31530b3936f57c95a29743056ef7a7903a5dbdf88"}, + {file = "numpy-2.2.5-cp310-cp310-win_amd64.whl", hash = "sha256:e4f0b035d9d0ed519c813ee23e0a733db81ec37d2e9503afbb6e54ccfdee0fa7"}, + {file = "numpy-2.2.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c42365005c7a6c42436a54d28c43fe0e01ca11eb2ac3cefe796c25a5f98e5e9b"}, + {file = "numpy-2.2.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:498815b96f67dc347e03b719ef49c772589fb74b8ee9ea2c37feae915ad6ebda"}, + {file = "numpy-2.2.5-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:6411f744f7f20081b1b4e7112e0f4c9c5b08f94b9f086e6f0adf3645f85d3a4d"}, + {file = "numpy-2.2.5-cp311-cp311-macosx_14_0_x86_64.whl", hash = "sha256:9de6832228f617c9ef45d948ec1cd8949c482238d68b2477e6f642c33a7b0a54"}, + {file = "numpy-2.2.5-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:369e0d4647c17c9363244f3468f2227d557a74b6781cb62ce57cf3ef5cc7c610"}, + {file = "numpy-2.2.5-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:262d23f383170f99cd9191a7c85b9a50970fe9069b2f8ab5d786eca8a675d60b"}, + {file = "numpy-2.2.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:aa70fdbdc3b169d69e8c59e65c07a1c9351ceb438e627f0fdcd471015cd956be"}, + {file = "numpy-2.2.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37e32e985f03c06206582a7323ef926b4e78bdaa6915095ef08070471865b906"}, + {file = "numpy-2.2.5-cp311-cp311-win32.whl", hash = "sha256:f5045039100ed58fa817a6227a356240ea1b9a1bc141018864c306c1a16d4175"}, + {file = "numpy-2.2.5-cp311-cp311-win_amd64.whl", hash = "sha256:b13f04968b46ad705f7c8a80122a42ae8f620536ea38cf4bdd374302926424dd"}, + {file = "numpy-2.2.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ee461a4eaab4f165b68780a6a1af95fb23a29932be7569b9fab666c407969051"}, + {file = "numpy-2.2.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ec31367fd6a255dc8de4772bd1658c3e926d8e860a0b6e922b615e532d320ddc"}, + {file = "numpy-2.2.5-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:47834cde750d3c9f4e52c6ca28a7361859fcaf52695c7dc3cc1a720b8922683e"}, + {file = "numpy-2.2.5-cp312-cp312-macosx_14_0_x86_64.whl", hash = "sha256:2c1a1c6ccce4022383583a6ded7bbcda22fc635eb4eb1e0a053336425ed36dfa"}, + {file = "numpy-2.2.5-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9d75f338f5f79ee23548b03d801d28a505198297534f62416391857ea0479571"}, + {file = "numpy-2.2.5-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a801fef99668f309b88640e28d261991bfad9617c27beda4a3aec4f217ea073"}, + {file = "numpy-2.2.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:abe38cd8381245a7f49967a6010e77dbf3680bd3627c0fe4362dd693b404c7f8"}, + {file = "numpy-2.2.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5a0ac90e46fdb5649ab6369d1ab6104bfe5854ab19b645bf5cda0127a13034ae"}, + {file = "numpy-2.2.5-cp312-cp312-win32.whl", hash = "sha256:0cd48122a6b7eab8f06404805b1bd5856200e3ed6f8a1b9a194f9d9054631beb"}, + {file = 
"numpy-2.2.5-cp312-cp312-win_amd64.whl", hash = "sha256:ced69262a8278547e63409b2653b372bf4baff0870c57efa76c5703fd6543282"}, + {file = "numpy-2.2.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:059b51b658f4414fff78c6d7b1b4e18283ab5fa56d270ff212d5ba0c561846f4"}, + {file = "numpy-2.2.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:47f9ed103af0bc63182609044b0490747e03bd20a67e391192dde119bf43d52f"}, + {file = "numpy-2.2.5-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:261a1ef047751bb02f29dfe337230b5882b54521ca121fc7f62668133cb119c9"}, + {file = "numpy-2.2.5-cp313-cp313-macosx_14_0_x86_64.whl", hash = "sha256:4520caa3807c1ceb005d125a75e715567806fed67e315cea619d5ec6e75a4191"}, + {file = "numpy-2.2.5-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3d14b17b9be5f9c9301f43d2e2a4886a33b53f4e6fdf9ca2f4cc60aeeee76372"}, + {file = "numpy-2.2.5-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2ba321813a00e508d5421104464510cc962a6f791aa2fca1c97b1e65027da80d"}, + {file = "numpy-2.2.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4cbdef3ddf777423060c6f81b5694bad2dc9675f110c4b2a60dc0181543fac7"}, + {file = "numpy-2.2.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:54088a5a147ab71a8e7fdfd8c3601972751ded0739c6b696ad9cb0343e21ab73"}, + {file = "numpy-2.2.5-cp313-cp313-win32.whl", hash = "sha256:c8b82a55ef86a2d8e81b63da85e55f5537d2157165be1cb2ce7cfa57b6aef38b"}, + {file = "numpy-2.2.5-cp313-cp313-win_amd64.whl", hash = "sha256:d8882a829fd779f0f43998e931c466802a77ca1ee0fe25a3abe50278616b1471"}, + {file = "numpy-2.2.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:e8b025c351b9f0e8b5436cf28a07fa4ac0204d67b38f01433ac7f9b870fa38c6"}, + {file = "numpy-2.2.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dfa94b6a4374e7851bbb6f35e6ded2120b752b063e6acdd3157e4d2bb922eba"}, + {file = "numpy-2.2.5-cp313-cp313t-macosx_14_0_arm64.whl", hash = 
"sha256:97c8425d4e26437e65e1d189d22dff4a079b747ff9c2788057bfb8114ce1e133"}, + {file = "numpy-2.2.5-cp313-cp313t-macosx_14_0_x86_64.whl", hash = "sha256:352d330048c055ea6db701130abc48a21bec690a8d38f8284e00fab256dc1376"}, + {file = "numpy-2.2.5-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b4c0773b6ada798f51f0f8e30c054d32304ccc6e9c5d93d46cb26f3d385ab19"}, + {file = "numpy-2.2.5-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:55f09e00d4dccd76b179c0f18a44f041e5332fd0e022886ba1c0bbf3ea4a18d0"}, + {file = "numpy-2.2.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:02f226baeefa68f7d579e213d0f3493496397d8f1cff5e2b222af274c86a552a"}, + {file = "numpy-2.2.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c26843fd58f65da9491165072da2cccc372530681de481ef670dcc8e27cfb066"}, + {file = "numpy-2.2.5-cp313-cp313t-win32.whl", hash = "sha256:1a161c2c79ab30fe4501d5a2bbfe8b162490757cf90b7f05be8b80bc02f7bb8e"}, + {file = "numpy-2.2.5-cp313-cp313t-win_amd64.whl", hash = "sha256:d403c84991b5ad291d3809bace5e85f4bbf44a04bdc9a88ed2bb1807b3360bb8"}, + {file = "numpy-2.2.5-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:b4ea7e1cff6784e58fe281ce7e7f05036b3e1c89c6f922a6bfbc0a7e8768adbe"}, + {file = "numpy-2.2.5-pp310-pypy310_pp73-macosx_14_0_x86_64.whl", hash = "sha256:d7543263084a85fbc09c704b515395398d31d6395518446237eac219eab9e55e"}, + {file = "numpy-2.2.5-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0255732338c4fdd00996c0421884ea8a3651eea555c3a56b84892b66f696eb70"}, + {file = "numpy-2.2.5-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:d2e3bdadaba0e040d1e7ab39db73e0afe2c74ae277f5614dad53eadbecbbb169"}, + {file = "numpy-2.2.5.tar.gz", hash = "sha256:a9c0d994680cd991b1cb772e8b297340085466a6fe964bc9d4e80f5e2f43c291"}, +] + +[[package]] +name = "pandas" +version = "2.2.3" +description = "Powerful data structures for data analysis, time series, and statistics" 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pandas-2.2.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1948ddde24197a0f7add2bdc4ca83bf2b1ef84a1bc8ccffd95eda17fd836ecb5"}, + {file = "pandas-2.2.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:381175499d3802cde0eabbaf6324cce0c4f5d52ca6f8c377c29ad442f50f6348"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d9c45366def9a3dd85a6454c0e7908f2b3b8e9c138f5dc38fed7ce720d8453ed"}, + {file = "pandas-2.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:86976a1c5b25ae3f8ccae3a5306e443569ee3c3faf444dfd0f41cda24667ad57"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:b8661b0238a69d7aafe156b7fa86c44b881387509653fdf857bebc5e4008ad42"}, + {file = "pandas-2.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:37e0aced3e8f539eccf2e099f65cdb9c8aa85109b0be6e93e2baff94264bdc6f"}, + {file = "pandas-2.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:56534ce0746a58afaf7942ba4863e0ef81c9c50d3f0ae93e9497d6a41a057645"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66108071e1b935240e74525006034333f98bcdb87ea116de573a6a0dccb6c039"}, + {file = "pandas-2.2.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:7c2875855b0ff77b2a64a0365e24455d9990730d6431b9e0ee18ad8acee13dbd"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd8d0c3be0515c12fed0bdbae072551c8b54b7192c7b1fda0ba56059a0179698"}, + {file = "pandas-2.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c124333816c3a9b03fbeef3a9f230ba9a737e9e5bb4060aa2107a86cc0a497fc"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:63cc132e40a2e084cf01adf0775b15ac515ba905d7dcca47e9a251819c575ef3"}, + {file = "pandas-2.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:29401dbfa9ad77319367d36940cd8a0b3a11aba16063e39632d98b0e931ddf32"}, + {file = "pandas-2.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:3fc6873a41186404dad67245896a6e440baacc92f5b716ccd1bc9ed2995ab2c5"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b1d432e8d08679a40e2a6d8b2f9770a5c21793a6f9f47fdd52c5ce1948a5a8a9"}, + {file = "pandas-2.2.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a5a1595fe639f5988ba6a8e5bc9649af3baf26df3998a0abe56c02609392e0a4"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5de54125a92bb4d1c051c0659e6fcb75256bf799a732a87184e5ea503965bce3"}, + {file = "pandas-2.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fffb8ae78d8af97f849404f21411c95062db1496aeb3e56f146f0355c9989319"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dfcb5ee8d4d50c06a51c2fffa6cff6272098ad6540aed1a76d15fb9318194d8"}, + {file = "pandas-2.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:062309c1b9ea12a50e8ce661145c6aab431b1e99530d3cd60640e255778bd43a"}, + {file = "pandas-2.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:59ef3764d0fe818125a5097d2ae867ca3fa64df032331b7e0917cf5d7bf66b13"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f00d1345d84d8c86a63e476bb4955e46458b304b9575dcf71102b5c705320015"}, + {file = "pandas-2.2.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3508d914817e153ad359d7e069d752cdd736a247c322d932eb89e6bc84217f28"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:22a9d949bfc9a502d320aa04e5d02feab689d61da4e7764b62c30b991c42c5f0"}, + {file = "pandas-2.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f3a255b2c19987fbbe62a9dfd6cff7ff2aa9ccab3fc75218fd4b7530f01efa24"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:800250ecdadb6d9c78eae4990da62743b857b470883fa27f652db8bdde7f6659"}, + {file = "pandas-2.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6374c452ff3ec675a8f46fd9ab25c4ad0ba590b71cf0656f8b6daa5202bca3fb"}, + {file = "pandas-2.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:61c5ad4043f791b61dd4752191d9f07f0ae412515d59ba8f005832a532f8736d"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:3b71f27954685ee685317063bf13c7709a7ba74fc996b84fc6821c59b0f06468"}, + {file = "pandas-2.2.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:38cf8125c40dae9d5acc10fa66af8ea6fdf760b2714ee482ca691fc66e6fcb18"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba96630bc17c875161df3818780af30e43be9b166ce51c9a18c1feae342906c2"}, + {file = "pandas-2.2.3-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1db71525a1538b30142094edb9adc10be3f3e176748cd7acc2240c2f2e5aa3a4"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:15c0e1e02e93116177d29ff83e8b1619c93ddc9c49083f237d4312337a61165d"}, + {file = "pandas-2.2.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:ad5b65698ab28ed8d7f18790a0dc58005c7629f227be9ecc1072aa74c0c1d43a"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:bc6b93f9b966093cb0fd62ff1a7e4c09e6d546ad7c1de191767baffc57628f39"}, + {file = "pandas-2.2.3-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:5dbca4c1acd72e8eeef4753eeca07de9b1db4f398669d5994086f788a5d7cc30"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8cd6d7cc958a3910f934ea8dbdf17b2364827bb4dafc38ce6eef6bb3d65ff09c"}, + {file = "pandas-2.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99df71520d25fade9db7c1076ac94eb994f4d2673ef2aa2e86ee039b6746d20c"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:31d0ced62d4ea3e231a9f228366919a5ea0b07440d9d4dac345376fd8e1477ea"}, + {file = "pandas-2.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7eee9e7cea6adf3e3d24e304ac6b8300646e2a5d1cd3a3c2abed9101b0846761"}, + {file = "pandas-2.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:4850ba03528b6dd51d6c5d273c46f183f39a9baf3f0143e566b89450965b105e"}, + {file = "pandas-2.2.3.tar.gz", hash = "sha256:4f18ba62b61d7e192368b84517265a99b4d7ee8912f8708660fb4a366cc82667"}, +] + +[package.dependencies] +numpy = [ + {version = ">=1.22.4", markers = "python_version < \"3.11\""}, + {version = ">=1.23.2", markers = "python_version == \"3.11\""}, + {version = ">=1.26.0", markers = "python_version >= \"3.12\""}, +] +python-dateutil = ">=2.8.2" +pytz = ">=2020.1" +tzdata = ">=2022.7" + +[package.extras] +all = ["PyQt5 (>=5.15.9)", "SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)", "beautifulsoup4 (>=4.11.2)", "bottleneck (>=1.3.6)", "dataframe-api-compat (>=0.1.7)", "fastparquet (>=2022.12.0)", "fsspec (>=2022.11.0)", "gcsfs (>=2022.11.0)", "html5lib (>=1.1)", "hypothesis (>=6.46.1)", "jinja2 (>=3.1.2)", "lxml (>=4.9.2)", "matplotlib (>=3.6.3)", "numba (>=0.56.4)", "numexpr (>=2.8.4)", "odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "pandas-gbq (>=0.19.0)", "psycopg2 (>=2.9.6)", "pyarrow (>=10.0.1)", "pymysql (>=1.0.2)", "pyreadstat (>=1.2.0)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)", "python-calamine (>=0.1.7)", "pyxlsb (>=1.0.10)", "qtpy (>=2.3.0)", "s3fs (>=2022.11.0)", "scipy (>=1.10.0)", "tables (>=3.8.0)", "tabulate (>=0.9.0)", "xarray (>=2022.12.0)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)", "zstandard (>=0.19.0)"] +aws = ["s3fs (>=2022.11.0)"] +clipboard = ["PyQt5 (>=5.15.9)", "qtpy (>=2.3.0)"] +compression = ["zstandard (>=0.19.0)"] +computation = ["scipy (>=1.10.0)", "xarray (>=2022.12.0)"] +consortium-standard = ["dataframe-api-compat (>=0.1.7)"] +excel = ["odfpy (>=1.4.1)", "openpyxl (>=3.1.0)", "python-calamine (>=0.1.7)", "pyxlsb 
(>=1.0.10)", "xlrd (>=2.0.1)", "xlsxwriter (>=3.0.5)"] +feather = ["pyarrow (>=10.0.1)"] +fss = ["fsspec (>=2022.11.0)"] +gcp = ["gcsfs (>=2022.11.0)", "pandas-gbq (>=0.19.0)"] +hdf5 = ["tables (>=3.8.0)"] +html = ["beautifulsoup4 (>=4.11.2)", "html5lib (>=1.1)", "lxml (>=4.9.2)"] +mysql = ["SQLAlchemy (>=2.0.0)", "pymysql (>=1.0.2)"] +output-formatting = ["jinja2 (>=3.1.2)", "tabulate (>=0.9.0)"] +parquet = ["pyarrow (>=10.0.1)"] +performance = ["bottleneck (>=1.3.6)", "numba (>=0.56.4)", "numexpr (>=2.8.4)"] +plot = ["matplotlib (>=3.6.3)"] +postgresql = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "psycopg2 (>=2.9.6)"] +pyarrow = ["pyarrow (>=10.0.1)"] +spss = ["pyreadstat (>=1.2.0)"] +sql-other = ["SQLAlchemy (>=2.0.0)", "adbc-driver-postgresql (>=0.8.0)", "adbc-driver-sqlite (>=0.8.0)"] +test = ["hypothesis (>=6.46.1)", "pytest (>=7.3.2)", "pytest-xdist (>=2.2.0)"] +xml = ["lxml (>=4.9.2)"] + +[[package]] +name = "posthog" +version = "3.25.0" +description = "Integrate PostHog into any python application." 
+optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "posthog-3.25.0-py2.py3-none-any.whl", hash = "sha256:85db78c13d1ecb11aed06fad53759c4e8fb3633442c2f3d0336bc0ce8a585d30"}, + {file = "posthog-3.25.0.tar.gz", hash = "sha256:9168f3e7a0a5571b6b1065c41b3c171fbc68bfe72c3ac0bfd6e3d2fcdb7df2ca"}, +] + +[package.dependencies] +backoff = ">=1.10.0" +distro = ">=1.5.0" +monotonic = ">=1.5" +python-dateutil = ">2.1" +requests = ">=2.7,<3.0" +six = ">=1.5" + +[package.extras] +dev = ["black", "django-stubs", "flake8", "flake8-print", "isort", "lxml", "mypy", "mypy-baseline", "pre-commit", "pydantic", "types-mock", "types-python-dateutil", "types-requests", "types-setuptools", "types-six"] +langchain = ["langchain (>=0.2.0)"] +sentry = ["django", "sentry-sdk"] +test = ["anthropic", "coverage", "django", "flake8", "freezegun (==1.5.1)", "langchain-anthropic (>=0.2.0)", "langchain-community (>=0.2.0)", "langchain-openai (>=0.2.0)", "langgraph", "mock (>=2.0.0)", "openai", "parameterized (>=0.8.1)", "pydantic", "pylint", "pytest", "pytest-asyncio", "pytest-timeout"] + +[[package]] +name = "propcache" +version = "0.3.1" +description = "Accelerated property cache" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f27785888d2fdd918bc36de8b8739f2d6c791399552333721b58193f68ea3e98"}, + {file = "propcache-0.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d4e89cde74154c7b5957f87a355bb9c8ec929c167b59c83d90654ea36aeb6180"}, + {file = "propcache-0.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:730178f476ef03d3d4d255f0c9fa186cb1d13fd33ffe89d39f2cda4da90ceb71"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:967a8eec513dbe08330f10137eacb427b2ca52118769e82ebcfcab0fba92a649"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5b9145c35cc87313b5fd480144f8078716007656093d23059e8993d3a8fa730f"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9e64e948ab41411958670f1093c0a57acfdc3bee5cf5b935671bbd5313bcf229"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:319fa8765bfd6a265e5fa661547556da381e53274bc05094fc9ea50da51bfd46"}, + {file = "propcache-0.3.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c66d8ccbc902ad548312b96ed8d5d266d0d2c6d006fd0f66323e9d8f2dd49be7"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2d219b0dbabe75e15e581fc1ae796109b07c8ba7d25b9ae8d650da582bed01b0"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:cd6a55f65241c551eb53f8cf4d2f4af33512c39da5d9777694e9d9c60872f519"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9979643ffc69b799d50d3a7b72b5164a2e97e117009d7af6dfdd2ab906cb72cd"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4cf9e93a81979f1424f1a3d155213dc928f1069d697e4353edb8a5eba67c6259"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2fce1df66915909ff6c824bbb5eb403d2d15f98f1518e583074671a30fe0c21e"}, + {file = "propcache-0.3.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4d0dfdd9a2ebc77b869a0b04423591ea8823f791293b527dc1bb896c1d6f1136"}, + {file = "propcache-0.3.1-cp310-cp310-win32.whl", hash = "sha256:1f6cc0ad7b4560e5637eb2c994e97b4fa41ba8226069c9277eb5ea7101845b42"}, + {file = "propcache-0.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:47ef24aa6511e388e9894ec16f0fbf3313a53ee68402bc428744a367ec55b833"}, + {file = "propcache-0.3.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:7f30241577d2fef2602113b70ef7231bf4c69a97e04693bde08ddab913ba0ce5"}, + {file = "propcache-0.3.1-cp311-cp311-macosx_10_9_x86_64.whl", 
hash = "sha256:43593c6772aa12abc3af7784bff4a41ffa921608dd38b77cf1dfd7f5c4e71371"}, + {file = "propcache-0.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a75801768bbe65499495660b777e018cbe90c7980f07f8aa57d6be79ea6f71da"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f6f1324db48f001c2ca26a25fa25af60711e09b9aaf4b28488602776f4f9a744"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5cdb0f3e1eb6dfc9965d19734d8f9c481b294b5274337a8cb5cb01b462dcb7e0"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1eb34d90aac9bfbced9a58b266f8946cb5935869ff01b164573a7634d39fbcb5"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f35c7070eeec2cdaac6fd3fe245226ed2a6292d3ee8c938e5bb645b434c5f256"}, + {file = "propcache-0.3.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b23c11c2c9e6d4e7300c92e022046ad09b91fd00e36e83c44483df4afa990073"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:3e19ea4ea0bf46179f8a3652ac1426e6dcbaf577ce4b4f65be581e237340420d"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:bd39c92e4c8f6cbf5f08257d6360123af72af9f4da75a690bef50da77362d25f"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b0313e8b923b3814d1c4a524c93dfecea5f39fa95601f6a9b1ac96cd66f89ea0"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e861ad82892408487be144906a368ddbe2dc6297074ade2d892341b35c59844a"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:61014615c1274df8da5991a1e5da85a3ccb00c2d4701ac6f3383afd3ca47ab0a"}, + {file = "propcache-0.3.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = 
"sha256:71ebe3fe42656a2328ab08933d420df5f3ab121772eef78f2dc63624157f0ed9"}, + {file = "propcache-0.3.1-cp311-cp311-win32.whl", hash = "sha256:58aa11f4ca8b60113d4b8e32d37e7e78bd8af4d1a5b5cb4979ed856a45e62005"}, + {file = "propcache-0.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:9532ea0b26a401264b1365146c440a6d78269ed41f83f23818d4b79497aeabe7"}, + {file = "propcache-0.3.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:f78eb8422acc93d7b69964012ad7048764bb45a54ba7a39bb9e146c72ea29723"}, + {file = "propcache-0.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:89498dd49c2f9a026ee057965cdf8192e5ae070ce7d7a7bd4b66a8e257d0c976"}, + {file = "propcache-0.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:09400e98545c998d57d10035ff623266927cb784d13dd2b31fd33b8a5316b85b"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa8efd8c5adc5a2c9d3b952815ff8f7710cefdcaf5f2c36d26aff51aeca2f12f"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c2fe5c910f6007e716a06d269608d307b4f36e7babee5f36533722660e8c4a70"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a0ab8cf8cdd2194f8ff979a43ab43049b1df0b37aa64ab7eca04ac14429baeb7"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:563f9d8c03ad645597b8d010ef4e9eab359faeb11a0a2ac9f7b4bc8c28ebef25"}, + {file = "propcache-0.3.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb6e0faf8cb6b4beea5d6ed7b5a578254c6d7df54c36ccd3d8b3eb00d6770277"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:1c5c7ab7f2bb3f573d1cb921993006ba2d39e8621019dffb1c5bc94cdbae81e8"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:050b571b2e96ec942898f8eb46ea4bfbb19bd5502424747e83badc2d4a99a44e"}, + {file = 
"propcache-0.3.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e1c4d24b804b3a87e9350f79e2371a705a188d292fd310e663483af6ee6718ee"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:e4fe2a6d5ce975c117a6bb1e8ccda772d1e7029c1cca1acd209f91d30fa72815"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:feccd282de1f6322f56f6845bf1207a537227812f0a9bf5571df52bb418d79d5"}, + {file = "propcache-0.3.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ec314cde7314d2dd0510c6787326bbffcbdc317ecee6b7401ce218b3099075a7"}, + {file = "propcache-0.3.1-cp312-cp312-win32.whl", hash = "sha256:7d2d5a0028d920738372630870e7d9644ce437142197f8c827194fca404bf03b"}, + {file = "propcache-0.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:88c423efef9d7a59dae0614eaed718449c09a5ac79a5f224a8b9664d603f04a3"}, + {file = "propcache-0.3.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:f1528ec4374617a7a753f90f20e2f551121bb558fcb35926f99e3c42367164b8"}, + {file = "propcache-0.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:dc1915ec523b3b494933b5424980831b636fe483d7d543f7afb7b3bf00f0c10f"}, + {file = "propcache-0.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a110205022d077da24e60b3df8bcee73971be9575dec5573dd17ae5d81751111"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d249609e547c04d190e820d0d4c8ca03ed4582bcf8e4e160a6969ddfb57b62e5"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ced33d827625d0a589e831126ccb4f5c29dfdf6766cac441d23995a65825dcb"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4114c4ada8f3181af20808bedb250da6bae56660e4b8dfd9cd95d4549c0962f7"}, + {file = "propcache-0.3.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:975af16f406ce48f1333ec5e912fe11064605d5c5b3f6746969077cc3adeb120"}, + {file = 
"propcache-0.3.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a34aa3a1abc50740be6ac0ab9d594e274f59960d3ad253cd318af76b996dd654"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9cec3239c85ed15bfaded997773fdad9fb5662b0a7cbc854a43f291eb183179e"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:05543250deac8e61084234d5fc54f8ebd254e8f2b39a16b1dce48904f45b744b"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5cb5918253912e088edbf023788de539219718d3b10aef334476b62d2b53de53"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f3bbecd2f34d0e6d3c543fdb3b15d6b60dd69970c2b4c822379e5ec8f6f621d5"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aca63103895c7d960a5b9b044a83f544b233c95e0dcff114389d64d762017af7"}, + {file = "propcache-0.3.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:5a0a9898fdb99bf11786265468571e628ba60af80dc3f6eb89a3545540c6b0ef"}, + {file = "propcache-0.3.1-cp313-cp313-win32.whl", hash = "sha256:3a02a28095b5e63128bcae98eb59025924f121f048a62393db682f049bf4ac24"}, + {file = "propcache-0.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:813fbb8b6aea2fc9659815e585e548fe706d6f663fa73dff59a1677d4595a037"}, + {file = "propcache-0.3.1-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:a444192f20f5ce8a5e52761a031b90f5ea6288b1eef42ad4c7e64fef33540b8f"}, + {file = "propcache-0.3.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0fbe94666e62ebe36cd652f5fc012abfbc2342de99b523f8267a678e4dfdee3c"}, + {file = "propcache-0.3.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f011f104db880f4e2166bcdcf7f58250f7a465bc6b068dc84c824a3d4a5c94dc"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3e584b6d388aeb0001d6d5c2bd86b26304adde6d9bb9bfa9c4889805021b96de"}, + {file = 
"propcache-0.3.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8a17583515a04358b034e241f952f1715243482fc2c2945fd99a1b03a0bd77d6"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5aed8d8308215089c0734a2af4f2e95eeb360660184ad3912686c181e500b2e7"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6d8e309ff9a0503ef70dc9a0ebd3e69cf7b3894c9ae2ae81fc10943c37762458"}, + {file = "propcache-0.3.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b655032b202028a582d27aeedc2e813299f82cb232f969f87a4fde491a233f11"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9f64d91b751df77931336b5ff7bafbe8845c5770b06630e27acd5dbb71e1931c"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:19a06db789a4bd896ee91ebc50d059e23b3639c25d58eb35be3ca1cbe967c3bf"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:bef100c88d8692864651b5f98e871fb090bd65c8a41a1cb0ff2322db39c96c27"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:87380fb1f3089d2a0b8b00f006ed12bd41bd858fabfa7330c954c70f50ed8757"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_s390x.whl", hash = "sha256:e474fc718e73ba5ec5180358aa07f6aded0ff5f2abe700e3115c37d75c947e18"}, + {file = "propcache-0.3.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:17d1c688a443355234f3c031349da69444be052613483f3e4158eef751abcd8a"}, + {file = "propcache-0.3.1-cp313-cp313t-win32.whl", hash = "sha256:359e81a949a7619802eb601d66d37072b79b79c2505e6d3fd8b945538411400d"}, + {file = "propcache-0.3.1-cp313-cp313t-win_amd64.whl", hash = "sha256:e7fb9a84c9abbf2b2683fa3e7b0d7da4d8ecf139a1c635732a8bda29c5214b0e"}, + {file = "propcache-0.3.1-cp39-cp39-macosx_10_9_universal2.whl", hash = 
"sha256:ed5f6d2edbf349bd8d630e81f474d33d6ae5d07760c44d33cd808e2f5c8f4ae6"}, + {file = "propcache-0.3.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:668ddddc9f3075af019f784456267eb504cb77c2c4bd46cc8402d723b4d200bf"}, + {file = "propcache-0.3.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0c86e7ceea56376216eba345aa1fc6a8a6b27ac236181f840d1d7e6a1ea9ba5c"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83be47aa4e35b87c106fc0c84c0fc069d3f9b9b06d3c494cd404ec6747544894"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:27c6ac6aa9fc7bc662f594ef380707494cb42c22786a558d95fcdedb9aa5d035"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:64a956dff37080b352c1c40b2966b09defb014347043e740d420ca1eb7c9b908"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:82de5da8c8893056603ac2d6a89eb8b4df49abf1a7c19d536984c8dd63f481d5"}, + {file = "propcache-0.3.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0c3c3a203c375b08fd06a20da3cf7aac293b834b6f4f4db71190e8422750cca5"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b303b194c2e6f171cfddf8b8ba30baefccf03d36a4d9cab7fd0bb68ba476a3d7"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:916cd229b0150129d645ec51614d38129ee74c03293a9f3f17537be0029a9641"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a461959ead5b38e2581998700b26346b78cd98540b5524796c175722f18b0294"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:069e7212890b0bcf9b2be0a03afb0c2d5161d91e1bf51569a64f629acc7defbf"}, + {file = "propcache-0.3.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ef2e4e91fb3945769e14ce82ed53007195e616a63aa43b40fb7ebaaf907c8d4c"}, + {file = 
"propcache-0.3.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8638f99dca15b9dff328fb6273e09f03d1c50d9b6512f3b65a4154588a7595fe"}, + {file = "propcache-0.3.1-cp39-cp39-win32.whl", hash = "sha256:6f173bbfe976105aaa890b712d1759de339d8a7cef2fc0a1714cc1a1e1c47f64"}, + {file = "propcache-0.3.1-cp39-cp39-win_amd64.whl", hash = "sha256:603f1fe4144420374f1a69b907494c3acbc867a581c2d49d4175b0de7cc64566"}, + {file = "propcache-0.3.1-py3-none-any.whl", hash = "sha256:9a8ecf38de50a7f518c21568c80f985e776397b902f1ce0b01f799aba1608b40"}, + {file = "propcache-0.3.1.tar.gz", hash = "sha256:40d980c33765359098837527e18eddefc9a24cea5b45e078a7f3bb5b032c6ecf"}, +] + +[[package]] +name = "pydantic" +version = "2.11.3" +description = "Data validation using Python type hints" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic-2.11.3-py3-none-any.whl", hash = "sha256:a082753436a07f9ba1289c6ffa01cd93db3548776088aa917cc43b63f68fa60f"}, + {file = "pydantic-2.11.3.tar.gz", hash = "sha256:7471657138c16adad9322fe3070c0116dd6c3ad8d649300e3cbdfe91f4db4ec3"}, +] + +[package.dependencies] +annotated-types = ">=0.6.0" +pydantic-core = "2.33.1" +typing-extensions = ">=4.12.2" +typing-inspection = ">=0.4.0" + +[package.extras] +email = ["email-validator (>=2.0.0)"] +timezone = ["tzdata ; python_version >= \"3.9\" and platform_system == \"Windows\""] + +[[package]] +name = "pydantic-core" +version = "2.33.1" +description = "Core functionality for Pydantic validation and serialization" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "pydantic_core-2.33.1-cp310-cp310-macosx_10_12_x86_64.whl", hash = "sha256:3077cfdb6125cc8dab61b155fdd714663e401f0e6883f9632118ec12cf42df26"}, + {file = "pydantic_core-2.33.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8ffab8b2908d152e74862d276cf5017c81a2f3719f14e8e3e8d6b83fda863927"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:5183e4f6a2d468787243ebcd70cf4098c247e60d73fb7d68d5bc1e1beaa0c4db"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:398a38d323f37714023be1e0285765f0a27243a8b1506b7b7de87b647b517e48"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:87d3776f0001b43acebfa86f8c64019c043b55cc5a6a2e313d728b5c95b46969"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c566dd9c5f63d22226409553531f89de0cac55397f2ab8d97d6f06cfce6d947e"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a0d5f3acc81452c56895e90643a625302bd6be351e7010664151cc55b7b97f89"}, + {file = "pydantic_core-2.33.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:d3a07fadec2a13274a8d861d3d37c61e97a816beae717efccaa4b36dfcaadcde"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:f99aeda58dce827f76963ee87a0ebe75e648c72ff9ba1174a253f6744f518f65"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_armv7l.whl", hash = "sha256:902dbc832141aa0ec374f4310f1e4e7febeebc3256f00dc359a9ac3f264a45dc"}, + {file = "pydantic_core-2.33.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fe44d56aa0b00d66640aa84a3cbe80b7a3ccdc6f0b1ca71090696a6d4777c091"}, + {file = "pydantic_core-2.33.1-cp310-cp310-win32.whl", hash = "sha256:ed3eb16d51257c763539bde21e011092f127a2202692afaeaccb50db55a31383"}, + {file = "pydantic_core-2.33.1-cp310-cp310-win_amd64.whl", hash = "sha256:694ad99a7f6718c1a498dc170ca430687a39894a60327f548e02a9c7ee4b6504"}, + {file = "pydantic_core-2.33.1-cp311-cp311-macosx_10_12_x86_64.whl", hash = "sha256:6e966fc3caaf9f1d96b349b0341c70c8d6573bf1bac7261f7b0ba88f96c56c24"}, + {file = "pydantic_core-2.33.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bfd0adeee563d59c598ceabddf2c92eec77abcb3f4a391b19aa7366170bd9e30"}, + 
{file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:91815221101ad3c6b507804178a7bb5cb7b2ead9ecd600041669c8d805ebd595"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:9fea9c1869bb4742d174a57b4700c6dadea951df8b06de40c2fedb4f02931c2e"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d20eb4861329bb2484c021b9d9a977566ab16d84000a57e28061151c62b349a"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0fb935c5591573ae3201640579f30128ccc10739b45663f93c06796854405505"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c964fd24e6166420d18fb53996d8c9fd6eac9bf5ae3ec3d03015be4414ce497f"}, + {file = "pydantic_core-2.33.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:681d65e9011f7392db5aa002b7423cc442d6a673c635668c227c6c8d0e5a4f77"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e100c52f7355a48413e2999bfb4e139d2977a904495441b374f3d4fb4a170961"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_armv7l.whl", hash = "sha256:048831bd363490be79acdd3232f74a0e9951b11b2b4cc058aeb72b22fdc3abe1"}, + {file = "pydantic_core-2.33.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:bdc84017d28459c00db6f918a7272a5190bec3090058334e43a76afb279eac7c"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win32.whl", hash = "sha256:32cd11c5914d1179df70406427097c7dcde19fddf1418c787540f4b730289896"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win_amd64.whl", hash = "sha256:2ea62419ba8c397e7da28a9170a16219d310d2cf4970dbc65c32faf20d828c83"}, + {file = "pydantic_core-2.33.1-cp311-cp311-win_arm64.whl", hash = "sha256:fc903512177361e868bc1f5b80ac8c8a6e05fcdd574a5fb5ffeac5a9982b9e89"}, + {file = 
"pydantic_core-2.33.1-cp312-cp312-macosx_10_12_x86_64.whl", hash = "sha256:1293d7febb995e9d3ec3ea09caf1a26214eec45b0f29f6074abb004723fc1de8"}, + {file = "pydantic_core-2.33.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:99b56acd433386c8f20be5c4000786d1e7ca0523c8eefc995d14d79c7a081498"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:35a5ec3fa8c2fe6c53e1b2ccc2454398f95d5393ab398478f53e1afbbeb4d939"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:b172f7b9d2f3abc0efd12e3386f7e48b576ef309544ac3a63e5e9cdd2e24585d"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9097b9f17f91eea659b9ec58148c0747ec354a42f7389b9d50701610d86f812e"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cc77ec5b7e2118b152b0d886c7514a4653bcb58c6b1d760134a9fab915f777b3"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d5e3d15245b08fa4a84cefc6c9222e6f37c98111c8679fbd94aa145f9a0ae23d"}, + {file = "pydantic_core-2.33.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:ef99779001d7ac2e2461d8ab55d3373fe7315caefdbecd8ced75304ae5a6fc6b"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:fc6bf8869e193855e8d91d91f6bf59699a5cdfaa47a404e278e776dd7f168b39"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_armv7l.whl", hash = "sha256:b1caa0bc2741b043db7823843e1bde8aaa58a55a58fda06083b0569f8b45693a"}, + {file = "pydantic_core-2.33.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:ec259f62538e8bf364903a7d0d0239447059f9434b284f5536e8402b7dd198db"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win32.whl", hash = "sha256:e14f369c98a7c15772b9da98987f58e2b509a93235582838bd0d1d8c08b68fda"}, + {file = 
"pydantic_core-2.33.1-cp312-cp312-win_amd64.whl", hash = "sha256:1c607801d85e2e123357b3893f82c97a42856192997b95b4d8325deb1cd0c5f4"}, + {file = "pydantic_core-2.33.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d13f0276806ee722e70a1c93da19748594f19ac4299c7e41237fc791d1861ea"}, + {file = "pydantic_core-2.33.1-cp313-cp313-macosx_10_12_x86_64.whl", hash = "sha256:70af6a21237b53d1fe7b9325b20e65cbf2f0a848cf77bed492b029139701e66a"}, + {file = "pydantic_core-2.33.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:282b3fe1bbbe5ae35224a0dbd05aed9ccabccd241e8e6b60370484234b456266"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4b315e596282bbb5822d0c7ee9d255595bd7506d1cb20c2911a4da0b970187d3"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:1dfae24cf9921875ca0ca6a8ecb4bb2f13c855794ed0d468d6abbec6e6dcd44a"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6dd8ecfde08d8bfadaea669e83c63939af76f4cf5538a72597016edfa3fad516"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2f593494876eae852dc98c43c6f260f45abdbfeec9e4324e31a481d948214764"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:948b73114f47fd7016088e5186d13faf5e1b2fe83f5e320e371f035557fd264d"}, + {file = "pydantic_core-2.33.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e11f3864eb516af21b01e25fac915a82e9ddad3bb0fb9e95a246067398b435a4"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:549150be302428b56fdad0c23c2741dcdb5572413776826c965619a25d9c6bde"}, + {file = "pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_armv7l.whl", hash = "sha256:495bc156026efafd9ef2d82372bd38afce78ddd82bf28ef5276c469e57c0c83e"}, + {file = 
"pydantic_core-2.33.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:ec79de2a8680b1a67a07490bddf9636d5c2fab609ba8c57597e855fa5fa4dacd"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win32.whl", hash = "sha256:ee12a7be1742f81b8a65b36c6921022301d466b82d80315d215c4c691724986f"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win_amd64.whl", hash = "sha256:ede9b407e39949d2afc46385ce6bd6e11588660c26f80576c11c958e6647bc40"}, + {file = "pydantic_core-2.33.1-cp313-cp313-win_arm64.whl", hash = "sha256:aa687a23d4b7871a00e03ca96a09cad0f28f443690d300500603bd0adba4b523"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:401d7b76e1000d0dd5538e6381d28febdcacb097c8d340dde7d7fc6e13e9f95d"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7aeb055a42d734c0255c9e489ac67e75397d59c6fbe60d155851e9782f276a9c"}, + {file = "pydantic_core-2.33.1-cp313-cp313t-win_amd64.whl", hash = "sha256:338ea9b73e6e109f15ab439e62cb3b78aa752c7fd9536794112e14bee02c8d18"}, + {file = "pydantic_core-2.33.1-cp39-cp39-macosx_10_12_x86_64.whl", hash = "sha256:5ab77f45d33d264de66e1884fca158bc920cb5e27fd0764a72f72f5756ae8bdb"}, + {file = "pydantic_core-2.33.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:e7aaba1b4b03aaea7bb59e1b5856d734be011d3e6d98f5bcaa98cb30f375f2ad"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7fb66263e9ba8fea2aa85e1e5578980d127fb37d7f2e292773e7bc3a38fb0c7b"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:3f2648b9262607a7fb41d782cc263b48032ff7a03a835581abbf7a3bec62bcf5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:723c5630c4259400818b4ad096735a829074601805d07f8cafc366d95786d331"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:d100e3ae783d2167782391e0c1c7a20a31f55f8015f3293647544df3f9c67824"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:177d50460bc976a0369920b6c744d927b0ecb8606fb56858ff542560251b19e5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:a3edde68d1a1f9af1273b2fe798997b33f90308fb6d44d8550c89fc6a3647cf6"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:a62c3c3ef6a7e2c45f7853b10b5bc4ddefd6ee3cd31024754a1a5842da7d598d"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_armv7l.whl", hash = "sha256:c91dbb0ab683fa0cd64a6e81907c8ff41d6497c346890e26b23de7ee55353f96"}, + {file = "pydantic_core-2.33.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:9f466e8bf0a62dc43e068c12166281c2eca72121dd2adc1040f3aa1e21ef8599"}, + {file = "pydantic_core-2.33.1-cp39-cp39-win32.whl", hash = "sha256:ab0277cedb698749caada82e5d099dc9fed3f906a30d4c382d1a21725777a1e5"}, + {file = "pydantic_core-2.33.1-cp39-cp39-win_amd64.whl", hash = "sha256:5773da0ee2d17136b1f1c6fbde543398d452a6ad2a7b54ea1033e2daa739b8d2"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-macosx_10_12_x86_64.whl", hash = "sha256:5c834f54f8f4640fd7e4b193f80eb25a0602bba9e19b3cd2fc7ffe8199f5ae02"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:049e0de24cf23766f12cc5cc71d8abc07d4a9deb9061b334b62093dedc7cb068"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a28239037b3d6f16916a4c831a5a0eadf856bdd6d2e92c10a0da3a59eadcf3e"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9d3da303ab5f378a268fa7d45f37d7d85c3ec19769f28d2cc0c61826a8de21fe"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = 
"sha256:25626fb37b3c543818c14821afe0fd3830bc327a43953bc88db924b68c5723f1"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3ab2d36e20fbfcce8f02d73c33a8a7362980cff717926bbae030b93ae46b56c7"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:2f9284e11c751b003fd4215ad92d325d92c9cb19ee6729ebd87e3250072cdcde"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:048c01eee07d37cbd066fc512b9d8b5ea88ceeb4e629ab94b3e56965ad655add"}, + {file = "pydantic_core-2.33.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:5ccd429694cf26af7997595d627dd2637e7932214486f55b8a357edaac9dae8c"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_10_12_x86_64.whl", hash = "sha256:3a371dc00282c4b84246509a5ddc808e61b9864aa1eae9ecc92bb1268b82db4a"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:f59295ecc75a1788af8ba92f2e8c6eeaa5a94c22fc4d151e8d9638814f85c8fc"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:08530b8ac922003033f399128505f513e30ca770527cc8bbacf75a84fcc2c74b"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bae370459da6a5466978c0eacf90690cb57ec9d533f8e63e564ef3822bfa04fe"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:e3de2777e3b9f4d603112f78006f4ae0acb936e95f06da6cb1a45fbad6bdb4b5"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:3a64e81e8cba118e108d7126362ea30e021291b7805d47e4896e52c791be2761"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:52928d8c1b6bda03cc6d811e8923dffc87a2d3c8b3bfd2ce16471c7147a24850"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-musllinux_1_1_x86_64.whl", hash = 
"sha256:1b30d92c9412beb5ac6b10a3eb7ef92ccb14e3f2a8d7732e2d739f58b3aa7544"}, + {file = "pydantic_core-2.33.1-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:f995719707e0e29f0f41a8aa3bcea6e761a36c9136104d3189eafb83f5cec5e5"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-macosx_10_12_x86_64.whl", hash = "sha256:7edbc454a29fc6aeae1e1eecba4f07b63b8d76e76a748532233c4c167b4cb9ea"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:ad05b683963f69a1d5d2c2bdab1274a31221ca737dbbceaa32bcb67359453cdd"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df6a94bf9452c6da9b5d76ed229a5683d0306ccb91cca8e1eea883189780d568"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7965c13b3967909a09ecc91f21d09cfc4576bf78140b988904e94f130f188396"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.whl", hash = "sha256:3f1fdb790440a34f6ecf7679e1863b825cb5ffde858a9197f851168ed08371e5"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_aarch64.whl", hash = "sha256:5277aec8d879f8d05168fdd17ae811dd313b8ff894aeeaf7cd34ad28b4d77e33"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_armv7l.whl", hash = "sha256:8ab581d3530611897d863d1a649fb0644b860286b4718db919bfd51ece41f10b"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-musllinux_1_1_x86_64.whl", hash = "sha256:0483847fa9ad5e3412265c1bd72aad35235512d9ce9d27d81a56d935ef489672"}, + {file = "pydantic_core-2.33.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:de9e06abe3cc5ec6a2d5f75bc99b0bdca4f5c719a5b34026f8c57efbdecd2ee3"}, + {file = "pydantic_core-2.33.1.tar.gz", hash = "sha256:bcc9c6fdb0ced789245b02b7d6603e17d1563064ddcfc36f046b61c0c05dd9df"}, +] + +[package.dependencies] +typing-extensions = ">=4.6.0,<4.7.0 || >4.7.0" + +[[package]] +name = "pyjwt" +version = "2.10.1" +description = "JSON Web Token 
implementation in Python" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "PyJWT-2.10.1-py3-none-any.whl", hash = "sha256:dcdd193e30abefd5debf142f9adfcdd2b58004e644f25406ffaebd50bd98dacb"}, + {file = "pyjwt-2.10.1.tar.gz", hash = "sha256:3cc5772eb20009233caf06e9d8a0577824723b44e6648ee0a2aedb6cf9381953"}, +] + +[package.extras] +crypto = ["cryptography (>=3.4.0)"] +dev = ["coverage[toml] (==5.0.4)", "cryptography (>=3.4.0)", "pre-commit", "pytest (>=6.0.0,<7.0.0)", "sphinx", "sphinx-rtd-theme", "zope.interface"] +docs = ["sphinx", "sphinx-rtd-theme", "zope.interface"] +tests = ["coverage[toml] (==5.0.4)", "pytest (>=6.0.0,<7.0.0)"] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +description = "Extensions to the standard Python datetime module" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, + {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, +] + +[package.dependencies] +six = ">=1.5" + +[[package]] +name = "python-dotenv" +version = "1.1.0" +description = "Read key-value pairs from a .env file and set them as environment variables" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "python_dotenv-1.1.0-py3-none-any.whl", hash = "sha256:d7c01d9e2293916c18baf562d95698754b0dbbb5e74d457c45d4f6561fb9d55d"}, + {file = "python_dotenv-1.1.0.tar.gz", hash = "sha256:41f90bc6f5f177fb41f53e87666db362025010eb28f60a01c9143bfa33a2b2d5"}, +] + +[package.extras] +cli = ["click (>=5.0)"] + +[[package]] +name = "python-multipart" +version = "0.0.20" +description = "A streaming multipart parser for Python" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = 
"python_multipart-0.0.20-py3-none-any.whl", hash = "sha256:8a62d3a8335e06589fe01f2a3e178cdcc632f3fbe0d492ad9ee0ec35aab1f104"}, + {file = "python_multipart-0.0.20.tar.gz", hash = "sha256:8dd0cab45b8e23064ae09147625994d090fa46f5b0d1e13af944c331a7fa9d13"}, +] + +[[package]] +name = "pytz" +version = "2025.2" +description = "World timezone definitions, modern and historical" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, + {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, +] + +[[package]] +name = "requests" +version = "2.32.3" +description = "Python HTTP for Humans." +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, + {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, +] + +[package.dependencies] +certifi = ">=2017.4.17" +charset-normalizer = ">=2,<4" +idna = ">=2.5,<4" +urllib3 = ">=1.21.1,<3" + +[package.extras] +socks = ["PySocks (>=1.5.6,!=1.5.7)"] +use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] + +[[package]] +name = "ruff" +version = "0.7.4" +description = "An extremely fast Python linter and code formatter, written in Rust." 
+optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "ruff-0.7.4-py3-none-linux_armv6l.whl", hash = "sha256:a4919925e7684a3f18e18243cd6bea7cfb8e968a6eaa8437971f681b7ec51478"}, + {file = "ruff-0.7.4-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:cfb365c135b830778dda8c04fb7d4280ed0b984e1aec27f574445231e20d6c63"}, + {file = "ruff-0.7.4-py3-none-macosx_11_0_arm64.whl", hash = "sha256:63a569b36bc66fbadec5beaa539dd81e0527cb258b94e29e0531ce41bacc1f20"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0d06218747d361d06fd2fdac734e7fa92df36df93035db3dc2ad7aa9852cb109"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e0cea28d0944f74ebc33e9f934238f15c758841f9f5edd180b5315c203293452"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:80094ecd4793c68b2571b128f91754d60f692d64bc0d7272ec9197fdd09bf9ea"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:997512325c6620d1c4c2b15db49ef59543ef9cd0f4aa8065ec2ae5103cedc7e7"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:00b4cf3a6b5fad6d1a66e7574d78956bbd09abfd6c8a997798f01f5da3d46a05"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7dbdc7d8274e1422722933d1edddfdc65b4336abf0b16dfcb9dedd6e6a517d06"}, + {file = "ruff-0.7.4-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e92dfb5f00eaedb1501b2f906ccabfd67b2355bdf117fea9719fc99ac2145bc"}, + {file = "ruff-0.7.4-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:3bd726099f277d735dc38900b6a8d6cf070f80828877941983a57bca1cd92172"}, + {file = "ruff-0.7.4-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:2e32829c429dd081ee5ba39aef436603e5b22335c3d3fff013cd585806a6486a"}, + {file = "ruff-0.7.4-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:662a63b4971807623f6f90c1fb664613f67cc182dc4d991471c23c541fee62dd"}, + {file = "ruff-0.7.4-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:876f5e09eaae3eb76814c1d3b68879891d6fde4824c015d48e7a7da4cf066a3a"}, + {file = "ruff-0.7.4-py3-none-win32.whl", hash = "sha256:75c53f54904be42dd52a548728a5b572344b50d9b2873d13a3f8c5e3b91f5cac"}, + {file = "ruff-0.7.4-py3-none-win_amd64.whl", hash = "sha256:745775c7b39f914238ed1f1b0bebed0b9155a17cd8bc0b08d3c87e4703b990d6"}, + {file = "ruff-0.7.4-py3-none-win_arm64.whl", hash = "sha256:11bff065102c3ae9d3ea4dc9ecdfe5a5171349cdd0787c1fc64761212fc9cf1f"}, + {file = "ruff-0.7.4.tar.gz", hash = "sha256:cd12e35031f5af6b9b93715d8c4f40360070b2041f81273d0527683d5708fce2"}, +] + +[[package]] +name = "six" +version = "1.17.0" +description = "Python 2 and 3 compatibility utilities" +optional = false +python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" +groups = ["main"] +files = [ + {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, + {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, +] + +[[package]] +name = "sniffio" +version = "1.3.1" +description = "Sniff out which async library your code is running under" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, + {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, +] + +[[package]] +name = "starlette" +version = "0.46.2" +description = "The little ASGI library that shines." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "starlette-0.46.2-py3-none-any.whl", hash = "sha256:595633ce89f8ffa71a015caed34a5b2dc1c0cdb3f0f1fbd1e69339cf2abeec35"}, + {file = "starlette-0.46.2.tar.gz", hash = "sha256:7f7361f34eed179294600af672f565727419830b54b7b084efe44bb82d2fccd5"}, +] + +[package.dependencies] +anyio = ">=3.6.2,<5" +typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} + +[package.extras] +full = ["httpx (>=0.27.0,<0.29.0)", "itsdangerous", "jinja2", "python-multipart (>=0.0.18)", "pyyaml"] + +[[package]] +name = "typing-extensions" +version = "4.13.2" +description = "Backported and Experimental Type Hints for Python 3.8+" +optional = false +python-versions = ">=3.8" +groups = ["main"] +files = [ + {file = "typing_extensions-4.13.2-py3-none-any.whl", hash = "sha256:a439e7c04b49fec3e5d3e2beaa21755cadbbdc391694e28ccdd36ca4a1408f8c"}, + {file = "typing_extensions-4.13.2.tar.gz", hash = "sha256:e6c81219bd689f51865d9e372991c540bda33a0379d5573cddb9a3a23f7caaef"}, +] + +[[package]] +name = "typing-inspection" +version = "0.4.0" +description = "Runtime typing introspection tools" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "typing_inspection-0.4.0-py3-none-any.whl", hash = "sha256:50e72559fcd2a6367a19f7a7e610e6afcb9fac940c650290eed893d61386832f"}, + {file = "typing_inspection-0.4.0.tar.gz", hash = "sha256:9765c87de36671694a67904bf2c96e395be9c6439bb6c87b5142569dcdd65122"}, +] + +[package.dependencies] +typing-extensions = ">=4.12.0" + +[[package]] +name = "tzdata" +version = "2025.2" +description = "Provider of IANA time zone data" +optional = false +python-versions = ">=2" +groups = ["main"] +files = [ + {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, + {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, 
+] + +[[package]] +name = "urllib3" +version = "2.4.0" +description = "HTTP library with thread-safe connection pooling, file post, and more." +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "urllib3-2.4.0-py3-none-any.whl", hash = "sha256:4e16665048960a0900c702d4a66415956a584919c03361cac9f1df5c5dd7e813"}, + {file = "urllib3-2.4.0.tar.gz", hash = "sha256:414bc6535b787febd7567804cc015fee39daab8ad86268f1310a9250697de466"}, +] + +[package.extras] +brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] +h2 = ["h2 (>=4,<5)"] +socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] +zstd = ["zstandard (>=0.18.0)"] + +[[package]] +name = "uuid7" +version = "0.1.0" +description = "UUID version 7, generating time-sorted UUIDs with 200ns time resolution and 48 bits of randomness" +optional = false +python-versions = ">=3.7" +groups = ["main"] +files = [ + {file = "uuid7-0.1.0-py2.py3-none-any.whl", hash = "sha256:5e259bb63c8cb4aded5927ff41b444a80d0c7124e8a0ced7cf44efa1f5cccf61"}, + {file = "uuid7-0.1.0.tar.gz", hash = "sha256:8c57aa32ee7456d3cc68c95c4530bc571646defac01895cfc73545449894a63c"}, +] + +[[package]] +name = "uvicorn" +version = "0.34.2" +description = "The lightning-fast ASGI server." 
+optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "uvicorn-0.34.2-py3-none-any.whl", hash = "sha256:deb49af569084536d269fe0a6d67e3754f104cf03aba7c11c40f01aadf33c403"}, + {file = "uvicorn-0.34.2.tar.gz", hash = "sha256:0e929828f6186353a80b58ea719861d2629d766293b6d19baf086ba31d4f3328"}, +] + +[package.dependencies] +click = ">=7.0" +h11 = ">=0.8" +typing-extensions = {version = ">=4.0", markers = "python_version < \"3.11\""} + +[package.extras] +standard = ["colorama (>=0.4) ; sys_platform == \"win32\"", "httptools (>=0.6.3)", "python-dotenv (>=0.13)", "pyyaml (>=5.1)", "uvloop (>=0.14.0,!=0.15.0,!=0.15.1) ; sys_platform != \"win32\" and sys_platform != \"cygwin\" and platform_python_implementation != \"PyPy\"", "watchfiles (>=0.13)", "websockets (>=10.4)"] + +[[package]] +name = "webencodings" +version = "0.5.1" +description = "Character encoding aliases for legacy web content" +optional = false +python-versions = "*" +groups = ["main"] +files = [ + {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, + {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, +] + +[[package]] +name = "websockets" +version = "15.0.1" +description = "An implementation of the WebSocket Protocol (RFC 6455 & 7692)" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:d63efaa0cd96cf0c5fe4d581521d9fa87744540d4bc999ae6e08595a1014b45b"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:ac60e3b188ec7574cb761b08d50fcedf9d77f1530352db4eef1707fe9dee7205"}, + {file = "websockets-15.0.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5756779642579d902eed757b21b0164cd6fe338506a8083eb58af5c372e39d9a"}, + {file = 
"websockets-15.0.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fdfe3e2a29e4db3659dbd5bbf04560cea53dd9610273917799f1cde46aa725e"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c2529b320eb9e35af0fa3016c187dffb84a3ecc572bcee7c3ce302bfeba52bf"}, + {file = "websockets-15.0.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac1e5c9054fe23226fb11e05a6e630837f074174c4c2f0fe442996112a6de4fb"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:5df592cd503496351d6dc14f7cdad49f268d8e618f80dce0cd5a36b93c3fc08d"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0a34631031a8f05657e8e90903e656959234f3a04552259458aac0b0f9ae6fd9"}, + {file = "websockets-15.0.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3d00075aa65772e7ce9e990cab3ff1de702aa09be3940d1dc88d5abf1ab8a09c"}, + {file = "websockets-15.0.1-cp310-cp310-win32.whl", hash = "sha256:1234d4ef35db82f5446dca8e35a7da7964d02c127b095e172e54397fb6a6c256"}, + {file = "websockets-15.0.1-cp310-cp310-win_amd64.whl", hash = "sha256:39c1fec2c11dc8d89bba6b2bf1556af381611a173ac2b511cf7231622058af41"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:823c248b690b2fd9303ba00c4f66cd5e2d8c3ba4aa968b2779be9532a4dad431"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678999709e68425ae2593acf2e3ebcbcf2e69885a5ee78f9eb80e6e371f1bf57"}, + {file = "websockets-15.0.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d50fd1ee42388dcfb2b3676132c78116490976f1300da28eb629272d5d93e905"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d99e5546bf73dbad5bf3547174cd6cb8ba7273062a23808ffea025ecb1cf8562"}, + {file = 
"websockets-15.0.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:66dd88c918e3287efc22409d426c8f729688d89a0c587c88971a0faa2c2f3792"}, + {file = "websockets-15.0.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8dd8327c795b3e3f219760fa603dcae1dcc148172290a8ab15158cf85a953413"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8fdc51055e6ff4adeb88d58a11042ec9a5eae317a0a53d12c062c8a8865909e8"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:693f0192126df6c2327cce3baa7c06f2a117575e32ab2308f7f8216c29d9e2e3"}, + {file = "websockets-15.0.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:54479983bd5fb469c38f2f5c7e3a24f9a4e70594cd68cd1fa6b9340dadaff7cf"}, + {file = "websockets-15.0.1-cp311-cp311-win32.whl", hash = "sha256:16b6c1b3e57799b9d38427dda63edcbe4926352c47cf88588c0be4ace18dac85"}, + {file = "websockets-15.0.1-cp311-cp311-win_amd64.whl", hash = "sha256:27ccee0071a0e75d22cb35849b1db43f2ecd3e161041ac1ee9d2352ddf72f065"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:3e90baa811a5d73f3ca0bcbf32064d663ed81318ab225ee4f427ad4e26e5aff3"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:592f1a9fe869c778694f0aa806ba0374e97648ab57936f092fd9d87f8bc03665"}, + {file = "websockets-15.0.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0701bc3cfcb9164d04a14b149fd74be7347a530ad3bbf15ab2c678a2cd3dd9a2"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e8b56bdcdb4505c8078cb6c7157d9811a85790f2f2b3632c7d1462ab5783d215"}, + {file = "websockets-15.0.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0af68c55afbd5f07986df82831c7bff04846928ea8d1fd7f30052638788bc9b5"}, + {file = 
"websockets-15.0.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64dee438fed052b52e4f98f76c5790513235efaa1ef7f3f2192c392cd7c91b65"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:d5f6b181bb38171a8ad1d6aa58a67a6aa9d4b38d0f8c5f496b9e42561dfc62fe"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:5d54b09eba2bada6011aea5375542a157637b91029687eb4fdb2dab11059c1b4"}, + {file = "websockets-15.0.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:3be571a8b5afed347da347bfcf27ba12b069d9d7f42cb8c7028b5e98bbb12597"}, + {file = "websockets-15.0.1-cp312-cp312-win32.whl", hash = "sha256:c338ffa0520bdb12fbc527265235639fb76e7bc7faafbb93f6ba80d9c06578a9"}, + {file = "websockets-15.0.1-cp312-cp312-win_amd64.whl", hash = "sha256:fcd5cf9e305d7b8338754470cf69cf81f420459dbae8a3b40cee57417f4614a7"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ee443ef070bb3b6ed74514f5efaa37a252af57c90eb33b956d35c8e9c10a1931"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5a939de6b7b4e18ca683218320fc67ea886038265fd1ed30173f5ce3f8e85675"}, + {file = "websockets-15.0.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:746ee8dba912cd6fc889a8147168991d50ed70447bf18bcda7039f7d2e3d9151"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:595b6c3969023ecf9041b2936ac3827e4623bfa3ccf007575f04c5a6aa318c22"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c714d2fc58b5ca3e285461a4cc0c9a66bd0e24c5da9911e30158286c9b5be7f"}, + {file = "websockets-15.0.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f3c1e2ab208db911594ae5b4f79addeb3501604a165019dd221c0bdcabe4db8"}, + {file = 
"websockets-15.0.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:229cf1d3ca6c1804400b0a9790dc66528e08a6a1feec0d5040e8b9eb14422375"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:756c56e867a90fb00177d530dca4b097dd753cde348448a1012ed6c5131f8b7d"}, + {file = "websockets-15.0.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:558d023b3df0bffe50a04e710bc87742de35060580a293c2a984299ed83bc4e4"}, + {file = "websockets-15.0.1-cp313-cp313-win32.whl", hash = "sha256:ba9e56e8ceeeedb2e080147ba85ffcd5cd0711b89576b83784d8605a7df455fa"}, + {file = "websockets-15.0.1-cp313-cp313-win_amd64.whl", hash = "sha256:e09473f095a819042ecb2ab9465aee615bd9c2028e4ef7d933600a8401c79561"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:5f4c04ead5aed67c8a1a20491d54cdfba5884507a48dd798ecaf13c74c4489f5"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:abdc0c6c8c648b4805c5eacd131910d2a7f6455dfd3becab248ef108e89ab16a"}, + {file = "websockets-15.0.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a625e06551975f4b7ea7102bc43895b90742746797e2e14b70ed61c43a90f09b"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d591f8de75824cbb7acad4e05d2d710484f15f29d4a915092675ad3456f11770"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:47819cea040f31d670cc8d324bb6435c6f133b8c7a19ec3d61634e62f8d8f9eb"}, + {file = "websockets-15.0.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac017dd64572e5c3bd01939121e4d16cf30e5d7e110a119399cf3133b63ad054"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4a9fac8e469d04ce6c25bb2610dc535235bd4aa14996b4e6dbebf5e007eba5ee"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_i686.whl", hash = 
"sha256:363c6f671b761efcb30608d24925a382497c12c506b51661883c3e22337265ed"}, + {file = "websockets-15.0.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2034693ad3097d5355bfdacfffcbd3ef5694f9718ab7f29c29689a9eae841880"}, + {file = "websockets-15.0.1-cp39-cp39-win32.whl", hash = "sha256:3b1ac0d3e594bf121308112697cf4b32be538fb1444468fb0a6ae4feebc83411"}, + {file = "websockets-15.0.1-cp39-cp39-win_amd64.whl", hash = "sha256:b7643a03db5c95c799b89b31c036d5f27eeb4d259c798e878d6937d71832b1e4"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:0c9e74d766f2818bb95f84c25be4dea09841ac0f734d1966f415e4edfc4ef1c3"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:1009ee0c7739c08a0cd59de430d6de452a55e42d6b522de7aa15e6f67db0b8e1"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:76d1f20b1c7a2fa82367e04982e708723ba0e7b8d43aa643d3dcd404d74f1475"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f29d80eb9a9263b8d109135351caf568cc3f80b9928bccde535c235de55c22d9"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b359ed09954d7c18bbc1680f380c7301f92c60bf924171629c5db97febb12f04"}, + {file = "websockets-15.0.1-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:cad21560da69f4ce7658ca2cb83138fb4cf695a2ba3e475e0559e05991aa8122"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7f493881579c90fc262d9cdbaa05a6b54b3811c2f300766748db79f098db9940"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:47b099e1f4fbc95b701b6e85768e1fcdaf1630f3cbe4765fa216596f12310e2e"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:67f2b6de947f8c757db2db9c71527933ad0019737ec374a8a6be9a956786aaf9"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d08eb4c2b7d6c41da6ca0600c077e93f5adcfd979cd777d747e9ee624556da4b"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4b826973a4a2ae47ba357e4e82fa44a463b8f168e1ca775ac64521442b19e87f"}, + {file = "websockets-15.0.1-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:21c1fa28a6a7e3cbdc171c694398b6df4744613ce9b36b1a498e816787e28123"}, + {file = "websockets-15.0.1-py3-none-any.whl", hash = "sha256:f7a866fbc1e97b5c617ee4116daaa09b722101d4a3c170c787450ba409f9736f"}, + {file = "websockets-15.0.1.tar.gz", hash = "sha256:82544de02076bafba038ce055ee6412d68da13ab47f0c60cab827346de828dee"}, +] + +[[package]] +name = "yarl" +version = "1.20.0" +description = "Yet another URL library" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "yarl-1.20.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:f1f6670b9ae3daedb325fa55fbe31c22c8228f6e0b513772c2e1c623caa6ab22"}, + {file = "yarl-1.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85a231fa250dfa3308f3c7896cc007a47bc76e9e8e8595c20b7426cac4884c62"}, + {file = "yarl-1.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:1a06701b647c9939d7019acdfa7ebbfbb78ba6aa05985bb195ad716ea759a569"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7595498d085becc8fb9203aa314b136ab0516c7abd97e7d74f7bb4eb95042abe"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:af5607159085dcdb055d5678fc2d34949bd75ae6ea6b4381e784bbab1c3aa195"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:95b50910e496567434cb77a577493c26bce0f31c8a305135f3bda6a2483b8e10"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b594113a301ad537766b4e16a5a6750fcbb1497dcc1bc8a4daae889e6402a634"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:083ce0393ea173cd37834eb84df15b6853b555d20c52703e21fbababa8c129d2"}, + {file = "yarl-1.20.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4f1a350a652bbbe12f666109fbddfdf049b3ff43696d18c9ab1531fbba1c977a"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fb0caeac4a164aadce342f1597297ec0ce261ec4532bbc5a9ca8da5622f53867"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:d88cc43e923f324203f6ec14434fa33b85c06d18d59c167a0637164863b8e995"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e52d6ed9ea8fd3abf4031325dc714aed5afcbfa19ee4a89898d663c9976eb487"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ce360ae48a5e9961d0c730cf891d40698a82804e85f6e74658fb175207a77cb2"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:06d06c9d5b5bc3eb56542ceeba6658d31f54cf401e8468512447834856fb0e61"}, + {file = "yarl-1.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c27d98f4e5c4060582f44e58309c1e55134880558f1add7a87c1bc36ecfade19"}, + {file = "yarl-1.20.0-cp310-cp310-win32.whl", hash = "sha256:f4d3fa9b9f013f7050326e165c3279e22850d02ae544ace285674cb6174b5d6d"}, + {file = "yarl-1.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:bc906b636239631d42eb8a07df8359905da02704a868983265603887ed68c076"}, + {file = "yarl-1.20.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:fdb5204d17cb32b2de2d1e21c7461cabfacf17f3645e4b9039f210c5d3378bf3"}, + {file = "yarl-1.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = 
"sha256:eaddd7804d8e77d67c28d154ae5fab203163bd0998769569861258e525039d2a"}, + {file = "yarl-1.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:634b7ba6b4a85cf67e9df7c13a7fb2e44fa37b5d34501038d174a63eaac25ee2"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6d409e321e4addf7d97ee84162538c7258e53792eb7c6defd0c33647d754172e"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:ea52f7328a36960ba3231c6677380fa67811b414798a6e071c7085c57b6d20a9"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c8703517b924463994c344dcdf99a2d5ce9eca2b6882bb640aa555fb5efc706a"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:077989b09ffd2f48fb2d8f6a86c5fef02f63ffe6b1dd4824c76de7bb01e4f2e2"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0acfaf1da020253f3533526e8b7dd212838fdc4109959a2c53cafc6db611bff2"}, + {file = "yarl-1.20.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b4230ac0b97ec5eeb91d96b324d66060a43fd0d2a9b603e3327ed65f084e41f8"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:0a6a1e6ae21cdd84011c24c78d7a126425148b24d437b5702328e4ba640a8902"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:86de313371ec04dd2531f30bc41a5a1a96f25a02823558ee0f2af0beaa7ca791"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:dd59c9dd58ae16eaa0f48c3d0cbe6be8ab4dc7247c3ff7db678edecbaf59327f"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:a0bc5e05f457b7c1994cc29e83b58f540b76234ba6b9648a4971ddc7f6aa52da"}, + {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:c9471ca18e6aeb0e03276b5e9b27b14a54c052d370a9c0c04a68cefbd1455eb4"}, 
+ {file = "yarl-1.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:40ed574b4df723583a26c04b298b283ff171bcc387bc34c2683235e2487a65a5"}, + {file = "yarl-1.20.0-cp311-cp311-win32.whl", hash = "sha256:db243357c6c2bf3cd7e17080034ade668d54ce304d820c2a58514a4e51d0cfd6"}, + {file = "yarl-1.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:8c12cd754d9dbd14204c328915e23b0c361b88f3cffd124129955e60a4fbfcfb"}, + {file = "yarl-1.20.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e06b9f6cdd772f9b665e5ba8161968e11e403774114420737f7884b5bd7bdf6f"}, + {file = "yarl-1.20.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b9ae2fbe54d859b3ade40290f60fe40e7f969d83d482e84d2c31b9bff03e359e"}, + {file = "yarl-1.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6d12b8945250d80c67688602c891237994d203d42427cb14e36d1a732eda480e"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:087e9731884621b162a3e06dc0d2d626e1542a617f65ba7cc7aeab279d55ad33"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:69df35468b66c1a6e6556248e6443ef0ec5f11a7a4428cf1f6281f1879220f58"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3b2992fe29002fd0d4cbaea9428b09af9b8686a9024c840b8a2b8f4ea4abc16f"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4c903e0b42aab48abfbac668b5a9d7b6938e721a6341751331bcd7553de2dcae"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bf099e2432131093cc611623e0b0bcc399b8cddd9a91eded8bfb50402ec35018"}, + {file = "yarl-1.20.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:8a7f62f5dc70a6c763bec9ebf922be52aa22863d9496a9a30124d65b489ea672"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = 
"sha256:54ac15a8b60382b2bcefd9a289ee26dc0920cf59b05368c9b2b72450751c6eb8"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:25b3bc0763a7aca16a0f1b5e8ef0f23829df11fb539a1b70476dcab28bd83da7"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b2586e36dc070fc8fad6270f93242124df68b379c3a251af534030a4a33ef594"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:866349da9d8c5290cfefb7fcc47721e94de3f315433613e01b435473be63daa6"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:33bb660b390a0554d41f8ebec5cd4475502d84104b27e9b42f5321c5192bfcd1"}, + {file = "yarl-1.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:737e9f171e5a07031cbee5e9180f6ce21a6c599b9d4b2c24d35df20a52fabf4b"}, + {file = "yarl-1.20.0-cp312-cp312-win32.whl", hash = "sha256:839de4c574169b6598d47ad61534e6981979ca2c820ccb77bf70f4311dd2cc64"}, + {file = "yarl-1.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:3d7dbbe44b443b0c4aa0971cb07dcb2c2060e4a9bf8d1301140a33a93c98e18c"}, + {file = "yarl-1.20.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:2137810a20b933b1b1b7e5cf06a64c3ed3b4747b0e5d79c9447c00db0e2f752f"}, + {file = "yarl-1.20.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:447c5eadd750db8389804030d15f43d30435ed47af1313303ed82a62388176d3"}, + {file = "yarl-1.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:42fbe577272c203528d402eec8bf4b2d14fd49ecfec92272334270b850e9cd7d"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:18e321617de4ab170226cd15006a565d0fa0d908f11f724a2c9142d6b2812ab0"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:4345f58719825bba29895011e8e3b545e6e00257abb984f9f27fe923afca2501"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:5d9b980d7234614bc4674468ab173ed77d678349c860c3af83b1fffb6a837ddc"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:af4baa8a445977831cbaa91a9a84cc09debb10bc8391f128da2f7bd070fc351d"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:123393db7420e71d6ce40d24885a9e65eb1edefc7a5228db2d62bcab3386a5c0"}, + {file = "yarl-1.20.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ab47acc9332f3de1b39e9b702d9c916af7f02656b2a86a474d9db4e53ef8fd7a"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:4a34c52ed158f89876cba9c600b2c964dfc1ca52ba7b3ab6deb722d1d8be6df2"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:04d8cfb12714158abf2618f792c77bc5c3d8c5f37353e79509608be4f18705c9"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:7dc63ad0d541c38b6ae2255aaa794434293964677d5c1ec5d0116b0e308031f5"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f9d02b591a64e4e6ca18c5e3d925f11b559c763b950184a64cf47d74d7e41877"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:95fc9876f917cac7f757df80a5dda9de59d423568460fe75d128c813b9af558e"}, + {file = "yarl-1.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bb769ae5760cd1c6a712135ee7915f9d43f11d9ef769cb3f75a23e398a92d384"}, + {file = "yarl-1.20.0-cp313-cp313-win32.whl", hash = "sha256:70e0c580a0292c7414a1cead1e076c9786f685c1fc4757573d2967689b370e62"}, + {file = "yarl-1.20.0-cp313-cp313-win_amd64.whl", hash = "sha256:4c43030e4b0af775a85be1fa0433119b1565673266a70bf87ef68a9d5ba3174c"}, + {file = "yarl-1.20.0-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b6c4c3d0d6a0ae9b281e492b1465c72de433b782e6b5001c8e7249e085b69051"}, + {file = "yarl-1.20.0-cp313-cp313t-macosx_10_13_x86_64.whl", hash = 
"sha256:8681700f4e4df891eafa4f69a439a6e7d480d64e52bf460918f58e443bd3da7d"}, + {file = "yarl-1.20.0-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:84aeb556cb06c00652dbf87c17838eb6d92cfd317799a8092cee0e570ee11229"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f166eafa78810ddb383e930d62e623d288fb04ec566d1b4790099ae0f31485f1"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:5d3d6d14754aefc7a458261027a562f024d4f6b8a798adb472277f675857b1eb"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2a8f64df8ed5d04c51260dbae3cc82e5649834eebea9eadfd829837b8093eb00"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4d9949eaf05b4d30e93e4034a7790634bbb41b8be2d07edd26754f2e38e491de"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c366b254082d21cc4f08f522ac201d0d83a8b8447ab562732931d31d80eb2a5"}, + {file = "yarl-1.20.0-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:91bc450c80a2e9685b10e34e41aef3d44ddf99b3a498717938926d05ca493f6a"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9c2aa4387de4bc3a5fe158080757748d16567119bef215bec643716b4fbf53f9"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_armv7l.whl", hash = "sha256:d2cbca6760a541189cf87ee54ff891e1d9ea6406079c66341008f7ef6ab61145"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:798a5074e656f06b9fad1a162be5a32da45237ce19d07884d0b67a0aa9d5fdda"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:f106e75c454288472dbe615accef8248c686958c2e7dd3b8d8ee2669770d020f"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_s390x.whl", hash = 
"sha256:3b60a86551669c23dc5445010534d2c5d8a4e012163218fc9114e857c0586fdd"}, + {file = "yarl-1.20.0-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:3e429857e341d5e8e15806118e0294f8073ba9c4580637e59ab7b238afca836f"}, + {file = "yarl-1.20.0-cp313-cp313t-win32.whl", hash = "sha256:65a4053580fe88a63e8e4056b427224cd01edfb5f951498bfefca4052f0ce0ac"}, + {file = "yarl-1.20.0-cp313-cp313t-win_amd64.whl", hash = "sha256:53b2da3a6ca0a541c1ae799c349788d480e5144cac47dba0266c7cb6c76151fe"}, + {file = "yarl-1.20.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:119bca25e63a7725b0c9d20ac67ca6d98fa40e5a894bd5d4686010ff73397914"}, + {file = "yarl-1.20.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:35d20fb919546995f1d8c9e41f485febd266f60e55383090010f272aca93edcc"}, + {file = "yarl-1.20.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:484e7a08f72683c0f160270566b4395ea5412b4359772b98659921411d32ad26"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8d8a3d54a090e0fff5837cd3cc305dd8a07d3435a088ddb1f65e33b322f66a94"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:f0cf05ae2d3d87a8c9022f3885ac6dea2b751aefd66a4f200e408a61ae9b7f0d"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a884b8974729e3899d9287df46f015ce53f7282d8d3340fa0ed57536b440621c"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f8d8aa8dd89ffb9a831fedbcb27d00ffd9f4842107d52dc9d57e64cb34073d5c"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4e88d6c3c8672f45a30867817e4537df1bbc6f882a91581faf1f6d9f0f1b5a"}, + {file = "yarl-1.20.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bdb77efde644d6f1ad27be8a5d67c10b7f769804fff7a966ccb1da5a4de4b656"}, + {file = 
"yarl-1.20.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4ba5e59f14bfe8d261a654278a0f6364feef64a794bd456a8c9e823071e5061c"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_armv7l.whl", hash = "sha256:d0bf955b96ea44ad914bc792c26a0edcd71b4668b93cbcd60f5b0aeaaed06c64"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:27359776bc359ee6eaefe40cb19060238f31228799e43ebd3884e9c589e63b20"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:04d9c7a1dc0a26efb33e1acb56c8849bd57a693b85f44774356c92d610369efa"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:faa709b66ae0e24c8e5134033187a972d849d87ed0a12a0366bedcc6b5dc14a5"}, + {file = "yarl-1.20.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:44869ee8538208fe5d9342ed62c11cc6a7a1af1b3d0bb79bb795101b6e77f6e0"}, + {file = "yarl-1.20.0-cp39-cp39-win32.whl", hash = "sha256:b7fa0cb9fd27ffb1211cde944b41f5c67ab1c13a13ebafe470b1e206b8459da8"}, + {file = "yarl-1.20.0-cp39-cp39-win_amd64.whl", hash = "sha256:d4fad6e5189c847820288286732075f213eabf81be4d08d6cc309912e62be5b7"}, + {file = "yarl-1.20.0-py3-none-any.whl", hash = "sha256:5d0fe6af927a47a230f31e6004621fd0959eaa915fc62acfafa67ff7229a3124"}, + {file = "yarl-1.20.0.tar.gz", hash = "sha256:686d51e51ee5dfe62dec86e4866ee0e9ed66df700d55c828a615640adc885307"}, +] + +[package.dependencies] +idna = ">=2.0" +multidict = ">=4.0" +propcache = ">=0.2.1" + +[[package]] +name = "zipp" +version = "3.21.0" +description = "Backport of pathlib-compatible object wrapper for zip files" +optional = false +python-versions = ">=3.9" +groups = ["main"] +files = [ + {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, + {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, +] + +[package.extras] +check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] +cover 
= ["pytest-cov"] +doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] +enabler = ["pytest-enabler (>=2.2)"] +test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] +type = ["pytest-mypy"] + +[metadata] +lock-version = "2.1" +python-versions = ">=3.9.21,<3.13" +content-hash = "ce4d91760f3763725e0c449444447c4993738d386a897e47392b772005cb83da" diff --git a/openbb_platform/core/pyproject.toml b/openbb_platform/core/pyproject.toml new file mode 100644 index 0000000000000000000000000000000000000000..59c027f4a933fc9d3a04222665a29fbc8a9ac53c --- /dev/null +++ b/openbb_platform/core/pyproject.toml @@ -0,0 +1,33 @@ +[tool.poetry] +name = "openbb-core" +version = "1.4.7" +description = "OpenBB package with core functionality." +authors = ["OpenBB Team "] +license = "AGPL-3.0-only" +readme = "README.md" +packages = [{ include = "openbb_core" }] + +[tool.poetry.dependencies] +python = ">=3.9.21,<3.13" +uvicorn = "^0.34.2" +websockets = "^15.0" +pandas = ">=1.5.3" +html5lib = "^1.1" +fastapi = "^0.115" +uuid7 = "^0.1.0" +posthog = "^3.3.1" +python-multipart = "^0.0.20" +pydantic = "^2.5.1" +requests = "^2.32.1" +importlib-metadata = ">=6.8.0" +python-dotenv = "^1.0.0" +aiohttp = "^3.11.11" +ruff = "^0.7" # Needed here to lint generated code +pyjwt = "^2.10.1" + +[tool.poetry.scripts] +openbb-build = "openbb_core.build:main" + +[build-system] +requires = ["poetry-core"] +build-backend = "poetry.core.masonry.api" diff --git a/openbb_platform/core/tests/api/test_auth/test_user_auth.py b/openbb_platform/core/tests/api/test_auth/test_user_auth.py new file mode 100644 index 0000000000000000000000000000000000000000..fcc6842e70e83b98d1c15aa9a667a4c0d3473879 --- /dev/null +++ b/openbb_platform/core/tests/api/test_auth/test_user_auth.py @@ -0,0 +1,64 @@ +"""Test the user module.""" + +# 
ruff: noqa: S105 S106 + +import asyncio +from unittest.mock import MagicMock, patch + +import pytest +from fastapi.security import HTTPBasicCredentials +from openbb_core.api.auth.user import ( + UserSettings, + authenticate_user, + get_user_service, + get_user_settings, +) + + +@pytest.mark.parametrize( + "error, correct, received", + [ + (True, (None, None), ("user", "pass")), + (True, ("user", "pass"), ("", "")), + (True, ("user", "pass"), ("random", "pass")), + (True, ("user", "pass"), ("user", "random")), + (False, ("", ""), ("", "")), + (False, ("user", "pass"), ("user", "pass")), + ], +) +@patch("openbb_core.api.auth.user.Env") +@patch("openbb_core.api.auth.user.HTTPBasicCredentials") +def test_authenticate_user(mock_credentials, mock_env, error, correct, received): + """Test authenticate user.""" + mock_env.return_value.API_USERNAME = correct[0] + mock_env.return_value.API_PASSWORD = correct[1] + mock_credentials = HTTPBasicCredentials(username=received[0], password=received[1]) + + if error: + with pytest.raises(Exception): + result = asyncio.run(authenticate_user(mock_credentials)) + else: + result = asyncio.run(authenticate_user(mock_credentials)) + assert result is None + + +@patch("openbb_core.api.auth.user.UserService") +def test_get_user_service(mock_user_service): + """Test get_user_service.""" + + mock_user_service.return_value = MagicMock() + + asyncio.run(get_user_service()) + + mock_user_service.assert_called_once_with() + + +@patch("openbb_core.api.auth.user.UserService") +def test_get_user_settings_(mock_user_service): + """Test get_user.""" + mock_user_settings = MagicMock(spec=UserSettings, profile=MagicMock(active=True)) + mock_user_service.read_from_file.return_value = mock_user_settings + mock_user_service.return_value = mock_user_service + result = asyncio.run(get_user_settings(MagicMock(), mock_user_service)) # type: ignore[arg-type] + + assert result == mock_user_settings diff --git 
a/openbb_platform/core/tests/api/test_dependency/__init__.py b/openbb_platform/core/tests/api/test_dependency/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..55cb40ff9e8e017663d6bec31b52bd5fbe993077 --- /dev/null +++ b/openbb_platform/core/tests/api/test_dependency/__init__.py @@ -0,0 +1 @@ +"""OpenBB Platform Core tests.""" diff --git a/openbb_platform/core/tests/api/test_dependency/test_coverage.py b/openbb_platform/core/tests/api/test_dependency/test_coverage.py new file mode 100644 index 0000000000000000000000000000000000000000..38bef9cced3e3116b7dc8852e22e88e7622f679f --- /dev/null +++ b/openbb_platform/core/tests/api/test_dependency/test_coverage.py @@ -0,0 +1,14 @@ +"""Test the coverate module.""" + +import asyncio +from unittest.mock import MagicMock + +from openbb_core.api.dependency.coverage import get_command_map + + +def test_get_system_settings(): + """Test get_system_settings.""" + + response = asyncio.run(get_command_map(MagicMock())) # type: ignore[arg-type] + + assert response diff --git a/openbb_platform/core/tests/api/test_dependency/test_system.py b/openbb_platform/core/tests/api/test_dependency/test_system.py new file mode 100644 index 0000000000000000000000000000000000000000..953f9e2b38407f3cde2445052179bb9d87925d8e --- /dev/null +++ b/openbb_platform/core/tests/api/test_dependency/test_system.py @@ -0,0 +1,19 @@ +"""Test the system module.""" + +import asyncio +from unittest.mock import MagicMock, patch + +from openbb_core.api.dependency.system import ( + SystemSettings, + get_system_settings, +) + + +@patch("openbb_core.api.dependency.system.SystemService") +def test_get_system_settings(mock_system_service): + """Test get_system_settings.""" + mock_system_service.return_value.system_settings = SystemSettings() + + response = asyncio.run(get_system_settings(MagicMock(), mock_system_service)) # type: ignore[arg-type] + + assert response diff --git a/openbb_platform/core/tests/api/test_rest_api.py 
b/openbb_platform/core/tests/api/test_rest_api.py new file mode 100644 index 0000000000000000000000000000000000000000..51b06d60a762181e51bf091d063133a14b17822d --- /dev/null +++ b/openbb_platform/core/tests/api/test_rest_api.py @@ -0,0 +1,8 @@ +"""Test rest_api.py.""" + +from openbb_core.api.rest_api import app + + +def test_openapi(): + """Test openapi schema generation.""" + assert app.openapi() diff --git a/openbb_platform/core/tests/api/test_router/__init__.py b/openbb_platform/core/tests/api/test_router/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..21bb0e0cc06b67d6d416e53cb6267ded8f547035 --- /dev/null +++ b/openbb_platform/core/tests/api/test_router/__init__.py @@ -0,0 +1 @@ +"""OpenBB Platform Core API router tests.""" diff --git a/openbb_platform/core/tests/api/test_router/test_router_coverage.py b/openbb_platform/core/tests/api/test_router/test_router_coverage.py new file mode 100644 index 0000000000000000000000000000000000000000..1ab310f7ffbf30362ef35611919b18487532bb08 --- /dev/null +++ b/openbb_platform/core/tests/api/test_router/test_router_coverage.py @@ -0,0 +1,39 @@ +"""Test coverage for the router module.""" + +from unittest.mock import patch + +from openbb_core.api.router.coverage import get_command_coverage, get_provider_coverage + + +@patch("openbb_core.api.router.coverage.CommandMap") +def test_get_provider_coverage(mock_command_map): + """Test get provider coverage.""" + mock_command_map.return_value.provider_coverage = { + "provider1": ["coverage1", "coverage2"] + } + + response = get_provider_coverage(mock_command_map) + + assert response + + +@patch("openbb_core.api.router.coverage.CommandMap") +def test_get_command_coverage(mock_command_map): + """Test get command coverage.""" + mock_command_map.return_value.command_coverage = { + "command1": ["coverage1", "coverage2"] + } + + response = get_command_coverage(mock_command_map) + assert response + + +@patch("openbb_core.api.router.coverage.CommandMap") 
+@patch("openbb_core.api.router.coverage.ProviderInterface") +def test_get_command_model(mock_provider_interface, mock_command_map): + """Test get command model.""" + mock_command_map.return_value.commands_model = {"command1": "model1"} + mock_provider_interface.return_value.map = {"model1": "provider1"} + + response = get_command_coverage(mock_command_map) + assert response diff --git a/openbb_platform/core/tests/api/test_router/test_router_system.py b/openbb_platform/core/tests/api/test_router/test_router_system.py new file mode 100644 index 0000000000000000000000000000000000000000..ec408ea44c37a818dcf158d10d29ba12c667dac0 --- /dev/null +++ b/openbb_platform/core/tests/api/test_router/test_router_system.py @@ -0,0 +1,16 @@ +"""Test the router system module.""" + +from unittest.mock import patch + +from openbb_core.api.router.system import get_system_model +from openbb_core.app.model.system_settings import SystemSettings + + +@patch("openbb_core.api.router.system.get_system_settings") +def test_get_system_model(mock_get_system_settings): + """Test get system model.""" + mock_get_system_settings.return_value = SystemSettings() + + response = get_system_model(mock_get_system_settings) + + assert response diff --git a/openbb_platform/core/tests/api/test_router/test_router_user.py b/openbb_platform/core/tests/api/test_router/test_router_user.py new file mode 100644 index 0000000000000000000000000000000000000000..e0d84ebd243d95294853e6b10190ef3a50ec2f6a --- /dev/null +++ b/openbb_platform/core/tests/api/test_router/test_router_user.py @@ -0,0 +1,17 @@ +"""Test the router settings.py module.""" + +import asyncio +from unittest.mock import Mock + +from openbb_core.api.router.user import read_user_settings + +# ruff: noqa: S106 + + +def test_read_user_settings(): + """Test read user settings.""" + mock_user_settings = Mock() + + result = asyncio.run(read_user_settings(user_settings=mock_user_settings)) + + assert result == mock_user_settings diff --git 
a/openbb_platform/core/tests/app/__init__.py b/openbb_platform/core/tests/app/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3f4c8aabddaf03f769d25babcef1c521d87642b1 --- /dev/null +++ b/openbb_platform/core/tests/app/__init__.py @@ -0,0 +1 @@ +"""OpenBB Platform core tests.""" diff --git a/openbb_platform/core/tests/app/logs/__init__.py b/openbb_platform/core/tests/app/logs/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..673bc2dec2f0cdbaafc87897342363043c6f0c40 --- /dev/null +++ b/openbb_platform/core/tests/app/logs/__init__.py @@ -0,0 +1 @@ +"""OpenBB Platform Core.""" diff --git a/openbb_platform/core/tests/app/logs/formatters/__init__.py b/openbb_platform/core/tests/app/logs/formatters/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3f4c8aabddaf03f769d25babcef1c521d87642b1 --- /dev/null +++ b/openbb_platform/core/tests/app/logs/formatters/__init__.py @@ -0,0 +1 @@ +"""OpenBB Platform core tests.""" diff --git a/openbb_platform/core/tests/app/logs/formatters/test_formatter_with_exceptions.py b/openbb_platform/core/tests/app/logs/formatters/test_formatter_with_exceptions.py new file mode 100644 index 0000000000000000000000000000000000000000..625c0bc87cc8e8315c15dcdcf1f9cc3391089c25 --- /dev/null +++ b/openbb_platform/core/tests/app/logs/formatters/test_formatter_with_exceptions.py @@ -0,0 +1,325 @@ +"""Tests for the FormatterWithExceptions class.""" + +import logging +import os +from unittest.mock import Mock + +import pytest +from openbb_core.app.logs.formatters.formatter_with_exceptions import ( + FormatterWithExceptions, +) + +# pylint: disable=W0621 + +logging_settings = Mock() +logging_settings.app_name = "test_app_name" +logging_settings.app_id = "test_app_id" +logging_settings.session_id = "test_session_id" +logging_settings.user_id = "test_user_id" + + +@pytest.fixture +def formatter(): + """Return a FormatterWithExceptions instance.""" + return 
FormatterWithExceptions(logging_settings) + + +# Test when exc_text is not empty (should return "X") +def test_level_name_with_exc_text(formatter): + """Test the calculate_level_name method.""" + record = logging.LogRecord( + name="test_logger", + level=logging.INFO, + pathname="", + lineno=0, + msg="", + args=None, + exc_info=None, + func=None, + ) + record.exc_text = "Exception occurred!" + assert formatter.calculate_level_name(record) == "X" + + +# Test when levelname is not empty (should return the first character of levelname) +def test_level_name_with_levelname(formatter): + """Test the calculate_level_name method.""" + record = logging.LogRecord( + name="test_logger", + level=logging.WARNING, + pathname="", + lineno=0, + msg="", + args=None, + exc_info=None, + func=None, + ) + record.levelname = "WARNING" + assert formatter.calculate_level_name(record) == "W" + + +# Test when func_name_override and session_id are present in the record +def test_extract_log_extra_with_override_and_session_id(formatter): + """Test the extract_log_extra method.""" + record = logging.LogRecord( + name="test_logger", + level=logging.INFO, + pathname="", + lineno=0, + msg="", + args=None, + exc_info=None, + func=None, + ) + record.func_name_override = "custom_function" + record.session_id = "1234567890" + + log_extra = formatter.extract_log_extra(record) + + assert log_extra == { + "sessionId": "1234567890", + } + + +# Test when only func_name_override is present in the record +def test_extract_log_extra_with_override(formatter): + """Test the extract_log_extra method.""" + record = logging.LogRecord( + name="test_logger", + level=logging.INFO, + pathname="", + lineno=0, + msg="", + args=None, + exc_info=None, + func=None, + ) + record.func_name_override = "custom_function" + + log_extra = formatter.extract_log_extra(record) + + assert log_extra == {} + assert record.funcName == "custom_function" + assert record.lineno == 0 + + +# Test when only session_id is present in the record 
+def test_extract_log_extra_with_session_id(formatter): + """Test the extract_log_extra method.""" + record = logging.LogRecord( + name="test_logger", + level=logging.INFO, + pathname="", + lineno=0, + msg="", + args=None, + exc_info=None, + func=None, + ) + record.session_id = "1234567890" + + log_extra = formatter.extract_log_extra(record) + + assert log_extra == { + "sessionId": "1234567890", + } + + +# Test when neither func_name_override nor session_id are present in the record +def test_extract_log_extra_with_no_override_or_session_id(formatter): + """Test the extract_log_extra method.""" + record = logging.LogRecord( + name="test_logger", + level=logging.INFO, + pathname="", + lineno=0, + msg="", + args=None, + exc_info=None, + func=None, + ) + + log_extra = formatter.extract_log_extra(record) + + assert log_extra == {} + + +@pytest.mark.parametrize( + "input_text, expected_output", + [ + ("Hello, this is 192.168.1.1", "Hello, this is FILTERED_IP "), + ("IP address: 10.0.0.1", "IP address: FILTERED_IP "), + ("No IPs here!", "No IPs here!"), + ("Another IP: 172.16.32.1", "Another IP: FILTERED_IP "), + ("1.2.3.4", " FILTERED_IP "), + ], +) +def test_mock_ipv4(input_text, expected_output, formatter): + """Test the mock_ipv4 method.""" + assert formatter.mock_ipv4(input_text) == expected_output + + +@pytest.mark.parametrize( + "input_text, expected_output", + [ + ("Send an email to john@example.com", "Send an email to FILTERED_EMAIL "), + ("No emails here!", "No emails here!"), + ("Another email: alice@example.co.uk", "Another email: FILTERED_EMAIL "), + ("Contact us: support@example.net", "Contact us: FILTERED_EMAIL "), + ("myemail@example.com 12345", " FILTERED_EMAIL 12345"), + ], +) +def test_mock_email(input_text, expected_output, formatter): + """Test the mock_email method.""" + assert formatter.mock_email(input_text) == expected_output + + +@pytest.mark.parametrize( + "input_text, expected_output", + [ + ('{"password": "secure_pass_123"}', '{"password": " 
FILTERED_PASSWORD "}'), + ('{"password": "p@$$w0rd!"}', '{"password": " FILTERED_PASSWORD "}'), + ('{"password": "12345"}', '{"password": " FILTERED_PASSWORD "}'), + ('{"password": "abc"}', '{"password": " FILTERED_PASSWORD "}'), + ( + '{"password": "my_password"} {"password": "test123"}', + '{"password": " FILTERED_PASSWORD "} {"password": " FILTERED_PASSWORD "}', + ), + ], +) +def test_mock_password(input_text, expected_output, formatter): + """Test the mock_password method.""" + assert formatter.mock_password(input_text) == expected_output + + +@pytest.mark.parametrize( + "input_text, expected_output", + [ + ('{"FLAIR": "[python]"}', '{"FLAIR": "[ FILTERED_FLAIR ]"}'), + ('{"FLAIR": "[question]"}', '{"FLAIR": "[ FILTERED_FLAIR ]"}'), + ('{"FLAIR": "[bug]"}', '{"FLAIR": "[ FILTERED_FLAIR ]"}'), + ('{"FLAIR": "[feature]"}', '{"FLAIR": "[ FILTERED_FLAIR ]"}'), + ('{"FLAIR": "[announcement]"}', '{"FLAIR": "[ FILTERED_FLAIR ]"}'), + ('{"FLAIR": "[FILTERED_FLAIR]"}', '{"FLAIR": "[ FILTERED_FLAIR ]"}'), + ], +) +def test_mock_flair(input_text, expected_output, formatter): + """Test the mock_flair method.""" + assert formatter.mock_flair(input_text) == expected_output + + +@pytest.mark.parametrize( + "input_text, expected_output", + [ + ( + "This is C:\\Users\\username\\file.txt", + "This is C:/Users/username/file.txt", + ), + ("No home directory here!", "No home directory here!"), + ( + f"This is {os.path.expanduser('~')}\\file.txt", + "This is MOCKING_USER_PATH/file.txt", + ), + ( + f"Some path: {os.path.expanduser('~/subfolder')}", + "Some path: MOCKING_USER_PATH/subfolder", + ), + ], +) +def test_mock_home_directory(input_text, expected_output, formatter): + """Test the mock_home_directory method.""" + assert formatter.mock_home_directory(input_text) == expected_output + + +@pytest.mark.parametrize( + "input_text, expected_output", + [ + ( + "This is a string with a\nnewline", + "This is a string with a MOCKING_BREAKLINE newline", + ), + ("'Traceback occurred", 
def test_formatException_invalid(formatter):
    """Test that formatException raises on a non exc_info argument.

    ``Formatter.formatException`` expects an ``exc_info`` tuple, so passing a
    bare ``Exception`` instance must raise.

    NOTE(review): the original version did not request the ``formatter``
    fixture, so the module-level fixture *function* object was referenced
    instead of a FormatterWithExceptions instance. The resulting
    AttributeError satisfied ``pytest.raises(Exception)`` vacuously, meaning
    ``formatException`` itself was never exercised. Requesting the fixture
    fixes that.
    """
    with pytest.raises(Exception):
        formatter.formatException(Exception("Big bad error"))
def __init__(self, app_name, user_logs_directory, session_id, frequency): + """Initialize the mock logging settings.""" + self.app_name = app_name + self.user_logs_directory = Path(user_logs_directory) + self.session_id = session_id + self.frequency = frequency + + +logging_settings = MagicMock(spec=MockLoggingSettings) +logging_settings.app_name = "test_app_name_xpto" +logging_settings.user_logs_directory = MagicMock() +logging_settings.user_logs_directory.absolute.return_value = Path( + "/mocked/logs/directory" +) +logging_settings.session_id = "test_session_id" +logging_settings.frequency = "H" +logging_settings.logging_suppress = False +logging_settings.log_collect = True + + +@pytest.fixture(scope="module") +def mocked_path(tmp_path_factory): + """Fixture to create a mocked file path.""" + return tmp_path_factory.mktemp("mocked_path") / "mocked_file.log" + + +@pytest.fixture(scope="module") +def handler(mocked_path): + """Fixture to create a PathTrackingFileHandler instance.""" + # patch `pathlib.Path.joinpath` to return a string containing the joined path + with patch.object(Path, "joinpath", return_value=mocked_path): + return PathTrackingFileHandler(logging_settings) + + +def test_build_log_file_path(handler, mocked_path): + """Test build_log_file_path method.""" + # Define a sample LoggingSettings object with mock attributes + settings = MagicMock(spec=MockLoggingSettings) + settings.app_name = "my_app" + settings.user_logs_directory = MagicMock() + settings.user_logs_directory.absolute.return_value = Path("/mocked/logs/directory") + settings.session_id = "abc123" + + # patch `pathlib.Path.joinpath` to return a string containing the joined path + with patch.object(Path, "joinpath", return_value=mocked_path) as mock_joinpath: + result_path = handler.build_log_file_path(settings) + + # Assert the result is correct + assert result_path == mocked_path + + mock_joinpath.assert_called_once_with("my_app_abc123") + + +def test_clean_expired_files(handler): + 
"""Test clean_expired_files method.""" + with patch( + "openbb_core.app.logs.handlers.path_tracking_file_handler.get_expired_file_list" + ) as mock_get_expired_file_list, patch( + "openbb_core.app.logs.handlers.path_tracking_file_handler.remove_file_list" + ) as mock_remove_file_list: + handler.clean_expired_files(123) + + assert mock_get_expired_file_list.call_count == 3 + assert mock_remove_file_list.call_count == 3 diff --git a/openbb_platform/core/tests/app/logs/handlers/test_posthog_handler.py b/openbb_platform/core/tests/app/logs/handlers/test_posthog_handler.py new file mode 100644 index 0000000000000000000000000000000000000000..5d2518b36a8c193453de49bea2d8e958c898c593 --- /dev/null +++ b/openbb_platform/core/tests/app/logs/handlers/test_posthog_handler.py @@ -0,0 +1,235 @@ +"""Tests for the PosthogHandler class.""" + +import logging +from pathlib import Path +from unittest.mock import MagicMock + +import pytest +from openbb_core.app.logs.handlers.posthog_handler import ( + PosthogHandler, +) + + +# pylint: disable=W0621, R0913 +class MockLoggingSettings: + """Mock logging settings.""" + + def __init__( + self, + app_name, + sub_app_name, + user_logs_directory, + session_id, + frequency, + appid, + platform, + python_version, + platform_version, + userid, + ): + """Initialize the mock logging settings.""" + self.app_name = app_name + self.sub_app_name = sub_app_name + self.user_logs_directory = Path(user_logs_directory) + self.session_id = session_id + self.frequency = frequency + self.app_id = appid + self.platform = platform + self.python_version = python_version + self.platform_version = platform_version + self.user_id = userid + + +logging_settings = MagicMock(spec=MockLoggingSettings) +logging_settings.app_name = "TestApp" +logging_settings.sub_app_name = "TestSubApp" +logging_settings.user_logs_directory = MagicMock() +logging_settings.user_logs_directory.absolute.return_value = Path( + "/mocked/logs/directory" +) +logging_settings.session_id = 
"session123" +logging_settings.frequency = "H" +logging_settings.app_id = "test123" +logging_settings.platform = "Windows" +logging_settings.python_version = "3.9" +logging_settings.platform_version = "1.2.3" +logging_settings.user_id = "user123" +logging_settings.logging_suppress = False +logging_settings.log_collect = True + + +@pytest.fixture +def handler(): + """Fixture to create a PosthogHandler instance.""" + return PosthogHandler(logging_settings) + + +def test_emit_calls_send(handler): + """Test the emit method.""" + # Arrange + record = logging.LogRecord( + name="test_logger", + level=logging.INFO, + pathname="test.py", + lineno=42, + msg="Test message", + args=None, + exc_info=None, + ) + + # Mock the send method + handler.send = MagicMock() + + # Act + handler.emit(record) + + # Assert + handler.send.assert_called_once_with(record=record) + + +def test_emit_calls_handleError_when_send_raises_exception(handler): + """Test the emit method.""" + # Arrange + record = logging.LogRecord( + name="test_logger", + level=logging.ERROR, + pathname="test.py", + lineno=42, + msg="Test error message", + args=None, + exc_info=None, + ) + + # Mock the send method to raise an exception + handler.send = MagicMock(side_effect=Exception) + + # Mock the handleError method + handler.handleError = MagicMock() + + # Act + try: + handler.emit(record) + except Exception as e: + assert isinstance(e, Exception) + + # Assert + handler.send.assert_called_once_with(record=record) + handler.handleError.assert_called_once_with(record) + + +def test_emit_calls_handleError_when_send_raises_exception_of_specific_type(handler): + """Test the emit method.""" + # Arrange + record = logging.LogRecord( + name="test_logger", + level=logging.ERROR, + pathname="test.py", + lineno=42, + msg="Test error message", + args=None, + exc_info=None, + ) + + # Mock the send method to raise an exception of a specific type + handler.send = MagicMock(side_effect=ValueError) + + # Mock the handleError method + 
handler.handleError = MagicMock() + + # Act + try: + handler.emit(record) + except Exception as e: + assert isinstance(e, ValueError) + + # Assert + handler.send.assert_called_once_with(record=record) + handler.handleError.assert_called_once_with(record) + + +def test_emit_calls_handleError_when_send_raises_exception_of_another_type(handler): + """Test the emit method.""" + # Arrange + record = logging.LogRecord( + name="test_logger", + level=logging.ERROR, + pathname="test.py", + lineno=42, + msg="Test error message", + args=None, + exc_info=None, + ) + + # Mock the send method to raise an exception of another type + handler.send = MagicMock(side_effect=TypeError) + + # Mock the handleError method + handler.handleError = MagicMock() + + # Act + try: + handler.emit(record) + except Exception as e: + assert isinstance(e, TypeError) + + # Assert + handler.send.assert_called_once_with(record=record) + handler.handleError.assert_called_once_with(record) + + +@pytest.mark.parametrize( + "log_info, expected_dict", + [ + ( + 'STARTUP: {"status": "success"}', + {"STARTUP": {"status": "success"}}, + ), + ( + 'CMD: {"path": "/stocks/", "known_cmd": "load", "other_args": "aapl", "input": "load aapl"}', + { + "CMD": { + "path": "/stocks/", + "known_cmd": "load", + "other_args": "aapl", + "input": "load aapl", + } + }, + ), + ], +) +def test_log_to_dict(handler, log_info, expected_dict): + """Test the log_to_dict method.""" + # Act + result = handler.log_to_dict(log_info) + + # Assert + assert result == expected_dict + + +@pytest.mark.parametrize( + "record, expected_extra", + [ + ( + logging.LogRecord( + "name", logging.INFO, "pathname", 42, "message", (), None, None + ), + { + "appName": "TestApp", + "subAppName": "TestSubApp", + "appId": "test123", + "sessionId": "session123", + "platform": "Windows", + "pythonVersion": "3.9", + "obbPlatformVersion": "1.2.3", + "userId": "user123", + }, + ), + ], +) +def test_extract_log_extra(handler, record, expected_extra): + """Test the 
extract_log_extra method.""" + # Act + result = handler.extract_log_extra(record) + + # Assert + assert result == expected_extra diff --git a/openbb_platform/core/tests/app/logs/test_handlers_manager.py b/openbb_platform/core/tests/app/logs/test_handlers_manager.py new file mode 100644 index 0000000000000000000000000000000000000000..e34f63adb896a2a2963bd555319f90c0c09414a6 --- /dev/null +++ b/openbb_platform/core/tests/app/logs/test_handlers_manager.py @@ -0,0 +1,108 @@ +"""Tests for the handlers manager.""" + +import logging +from unittest.mock import Mock, patch + +from openbb_core.app.logs.handlers_manager import ( + HandlersManager, + PathTrackingFileHandler, + PosthogHandler, +) + +# pylint: disable=W0231 + + +class MockPosthogHandler(logging.NullHandler): + """Mock posthog handler.""" + + def __init__(self, settings): + """Initialize the handler.""" + self.settings = settings + self.level = logging.DEBUG + + +class MockPathTrackingFileHandler(logging.NullHandler): + """Mock path tracking file handler.""" + + def __init__(self, settings): + """Initialize the handler.""" + self.settings = settings + self.level = logging.DEBUG + + +class MockFormatterWithExceptions(logging.Formatter): + """Mock formatter with exceptions.""" + + def __init__(self, settings): + """Initialize the formatter.""" + self.settings = settings + + +def test_handlers_added_correctly(): + """Test if the handlers are added correctly.""" + with patch( + "openbb_core.app.logs.handlers_manager.PosthogHandler", + MockPosthogHandler, + ), patch( + "openbb_core.app.logs.handlers_manager.PathTrackingFileHandler", + MockPathTrackingFileHandler, + ), patch( + "openbb_core.app.logs.handlers_manager.FormatterWithExceptions", + MockFormatterWithExceptions, + ): + settings = Mock() + settings.verbosity = 20 + settings.handler_list = ["stdout", "stderr", "noop", "file", "posthog"] + settings.log_collect = True + settings.logging_suppress = False + logger = 
logging.getLogger("test_handlers_added_correctly") + handlers_manager = HandlersManager(logger=logger, settings=settings) + handlers_manager.setup() + handlers = logger.handlers + + assert not logger.propagate + assert logger.level == 20 + assert len(handlers) >= 5 + + for handler in handlers: + assert isinstance( + handler, + ( + logging.NullHandler, + logging.StreamHandler, + PathTrackingFileHandler, + PosthogHandler, + ), + ) + + for mock in [MockPosthogHandler, MockPathTrackingFileHandler]: + assert any(isinstance(handler, mock) for handler in handlers) + + +def test_update_handlers(): + """Test if the handlers are updated correctly.""" + with patch( + "openbb_core.app.logs.handlers_manager.PosthogHandler", + MockPosthogHandler, + ), patch( + "openbb_core.app.logs.handlers_manager.PathTrackingFileHandler", + MockPathTrackingFileHandler, + ), patch( + "openbb_core.app.logs.handlers_manager.FormatterWithExceptions", + MockFormatterWithExceptions, + ): + settings = Mock() + settings.handler_list = ["file", "posthog"] + settings.any_other_attr = "mock_settings" + logger = logging.getLogger("test_update_handlers") + handlers_manager = HandlersManager(logger=logger, settings=settings) + + changed_settings = Mock() + changed_settings.any_other_attr = "changed_settings" + + handlers_manager.update_handlers(settings=changed_settings) + + for hdlr in logger.handlers: + if isinstance(hdlr, (MockPosthogHandler, MockPathTrackingFileHandler)): + assert hdlr.settings == changed_settings + assert hdlr.formatter.settings == changed_settings # type: ignore[union-attr] diff --git a/openbb_platform/core/tests/app/logs/test_logging_service.py b/openbb_platform/core/tests/app/logs/test_logging_service.py new file mode 100644 index 0000000000000000000000000000000000000000..369bc3a5319f1146364957d53e1782f8184405b3 --- /dev/null +++ b/openbb_platform/core/tests/app/logs/test_logging_service.py @@ -0,0 +1,243 @@ +"""Test LoggingService class.""" + +import json +from typing import 
Optional +from unittest.mock import MagicMock, Mock, patch + +import pytest +from openbb_core.app.logs.logging_service import LoggingService +from openbb_core.app.model.abstract.error import OpenBBError +from pydantic import BaseModel + +# ruff: noqa: S106 +# pylint: disable=redefined-outer-name, protected-access + + +class MockSystemSettings: + """Mock system settings.""" + + def __init__(self): + """Initialize the mock system settings.""" + self.logging_suppress = False + self.log_collect = True + + +class MockLoggingSettings: + """Mock logging settings.""" + + def __init__(self, system_settings, user_settings): + """Initialize the mock logging settings.""" + self.system_settings = system_settings + self.user_settings = user_settings + self.logging_suppress = False + self.log_collect = True + + +class MockOBBject(BaseModel): + """Mock object for testing.""" + + output: Optional[str] = None + error: Optional[str] = None + + +@pytest.fixture(scope="function") +def logging_service(): + """Return a LoggingService instance.""" + mock_system_settings = MockSystemSettings() + mock_user_settings = Mock() + mock_setup_handlers = Mock() + mock_log_startup = Mock() + + with patch( + "openbb_core.app.logs.logging_service.LoggingSettings", + MockLoggingSettings, + ), patch( + "openbb_core.app.logs.logging_service.LoggingService._setup_handlers", + mock_setup_handlers, + ), patch( + "openbb_core.app.logs.logging_service.LoggingService._log_startup", + mock_log_startup, + ): + _logging_service = LoggingService( + system_settings=mock_system_settings, + user_settings=mock_user_settings, + ) + _logging_service._logger = MagicMock() + + return _logging_service + + +def test_correctly_initialized(): + """Test the LoggingService is correctly initialized.""" + mock_system_settings = Mock() + mock_user_settings = Mock() + mock_setup_handlers = Mock() + mock_log_startup = Mock() + + with patch( + "openbb_core.app.logs.logging_service.LoggingSettings", + MockLoggingSettings, + ), patch( 
def test_log_startup(logging_service):
    """Test that _log_startup logs the expected STARTUP payload once.

    Credentials must be reported as "defined"/"not defined" flags, never as
    their actual values.
    """

    class MockCredentials(BaseModel):
        username: str
        password: str

    logging_service._user_settings = MagicMock(
        preferences="your_preferences",
        credentials=MockCredentials(username="username", password="password"),
    )
    # NOTE(review): the original had a stray no-op expression statement
    # (`logging_service._system_settings` alone on a line) right after this
    # assignment; it has been removed.
    logging_service._system_settings = "your_system_settings"

    logging_service._log_startup(
        route="test_route", custom_headers={"X-OpenBB-Test": "test"}
    )

    expected_log_data = {
        "route": "test_route",
        "PREFERENCES": "your_preferences",
        "KEYS": {
            "username": "defined",
            "password": "defined",  # pragma: allowlist secret
        },
        "SYSTEM": "your_system_settings",
        "custom_headers": {"X-OpenBB-Test": "test"},
    }
    logging_service._logger.info.assert_called_once_with(
        "STARTUP: %s ",
        json.dumps(expected_log_data),
    )
custom_headers, expected_log_message", + [ + ( + "mock_settings", + MockSystemSettings(), + "mock_route", + "mock_func", + {}, + (None, None, None), + None, + 'CMD: {"route": "mock_route", "input": {}, "error": null, ' + + '"provider": "not_passed_to_kwargs", "custom_headers": null}', + ), + ( + "mock_settings", + MockSystemSettings(), + "mock_route", + "mock_func", + {}, + ( + OpenBBError, + OpenBBError("mock_error"), + ..., + ), # ... is of TracebackType, but unnecessary for the test + {"X-OpenBB-Test": "test"}, + 'ERROR: {"route": "mock_route", "input": {}, "error": "mock_error", "provider": "not_passed_to_kwargs", "custom_headers": {"X-OpenBB-Test": "test"}}', # noqa: E501 + ), + ( + "mock_settings", + MockSystemSettings(), + "login", + "mock_func", + {}, + (None, None, None), + {"X-OpenBB-Test1": "test1", "X-OpenBB-Test2": "test2"}, + "STARTUP", + ), + ], +) +def test_log( + logging_service, + user_settings, + system_settings, + route, + func, + kwargs, + exec_info, + custom_headers, + expected_log_message, +): + """Test the log method.""" + with patch( + "openbb_core.app.logs.logging_service.LoggingSettings", + MockLoggingSettings, + ): + if route == "login": + with patch( + "openbb_core.app.logs.logging_service.LoggingService._log_startup" + ) as mock_log_startup: + logging_service.log( + user_settings=user_settings, + system_settings=system_settings, + route=route, + func=func, + kwargs=kwargs, + exec_info=exec_info, + custom_headers=custom_headers, + ) + mock_log_startup.assert_called_once() + + else: + mock_callable = Mock() + mock_callable.__name__ = func + + logging_service.log( + user_settings=user_settings, + system_settings=system_settings, + route=route, + func=mock_callable, + kwargs=kwargs, + exec_info=exec_info, + custom_headers=custom_headers, + ) + + if expected_log_message.startswith("ERROR"): + logging_service._logger.error.assert_called_once_with( + expected_log_message, + extra={"func_name_override": "mock_func"}, + exc_info=exec_info, + ) 
+ if expected_log_message.startswith("CMD"): + logging_service._logger.info.assert_called_once_with( + expected_log_message, + extra={"func_name_override": "mock_func"}, + exc_info=exec_info, + ) diff --git a/openbb_platform/core/tests/app/logs/utils/__init__.py b/openbb_platform/core/tests/app/logs/utils/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..55cb40ff9e8e017663d6bec31b52bd5fbe993077 --- /dev/null +++ b/openbb_platform/core/tests/app/logs/utils/__init__.py @@ -0,0 +1 @@ +"""OpenBB Platform Core tests.""" diff --git a/openbb_platform/core/tests/app/logs/utils/test_expired_files.py b/openbb_platform/core/tests/app/logs/utils/test_expired_files.py new file mode 100644 index 0000000000000000000000000000000000000000..1cb7897eb2599aa79f1a94a6f262c14ad88d4a4a --- /dev/null +++ b/openbb_platform/core/tests/app/logs/utils/test_expired_files.py @@ -0,0 +1,166 @@ +"""Tests for the expired_files module.""" + +import os +import tempfile +from pathlib import Path +from time import time +from typing import List +from unittest.mock import MagicMock, patch + +import pytest +from openbb_core.app.logs.utils.expired_files import ( + get_expired_file_list, + get_timestamp_from_x_days, + remove_file_list, +) + +# pylint: disable=W0621 + + +@pytest.fixture +def temp_test_files(): + """Create temporary files for testing.""" + with tempfile.TemporaryDirectory() as temp_dir: + temp_files = [ + Path(temp_dir) / "file1.txt", + Path(temp_dir) / "file2.txt", + Path(temp_dir) / "file3.txt", + ] + + # Create some test files + for file_path in temp_files: + file_path.touch() + + yield temp_dir, temp_files + + +def test_get_timestamp_from_x_days(): + """Test get_timestamp_from_x_days.""" + result = get_timestamp_from_x_days(0) + assert isinstance(result, float) + + +# Test case when all files are expired +def test_all_files_expired(temp_test_files): + """Test get_expired_file_list when all files are expired.""" + temp_dir, temp_files = temp_test_files + 
before_timestamp = time() + 3 * 86400 # timestamp 3 days from now + expired_files = get_expired_file_list(Path(temp_dir), before_timestamp) + assert set(expired_files) == set(temp_files) + + +# Test case when no files are expired +def test_no_files_expired(temp_test_files): + """Test get_expired_file_list when no files are expired.""" + temp_dir, _ = temp_test_files + before_timestamp = 0 + expired_files = get_expired_file_list(Path(temp_dir), before_timestamp) + assert not expired_files + + +# Test case when some files are expired and some are not +def test_some_files_expired(temp_test_files): + """Test get_expired_file_list when some files are expired and some are not.""" + temp_dir, _ = temp_test_files + + # add temp file to temp_dir with timestamp in the future + temp_file = Path(temp_dir) / "file4.txt" + temp_file.touch() + time_in_future = time() + 4 * 86400 # timestamp 4 days from now + os.utime(temp_file, times=(time_in_future, time_in_future)) + + before_timestamp = time() + 3 * 86400 # timestamp 3 days from now + expired_files = get_expired_file_list(Path(temp_dir), before_timestamp) + assert len(expired_files) == 3 + + # assert number of files in temp_dir is 4 + assert len(list(Path(temp_dir).iterdir())) == 4 + + +# Test case when the directory does not exist +def test_directory_not_exists(): + """Test get_expired_file_list when the directory does not exist.""" + directory = Path("/path/that/does/not/exist") + before_timestamp = time() + expired_files = get_expired_file_list(directory, before_timestamp) + assert not expired_files + + +# Test case when the directory is not a directory +def test_directory_not_dir(temp_test_files): + """Test get_expired_file_list when the directory is not a directory.""" + _, temp_files = temp_test_files + before_timestamp = time() + expired_files = get_expired_file_list(temp_files[0], before_timestamp) + assert not expired_files + + +@pytest.fixture +def mock_path(): + """Return a MagicMock for the Path class.""" + # 
Create a MagicMock for the Path class to represent a file path + return MagicMock() + + +def test_remove_file_list_no_files(mock_path): + """Test remove_file_list when there are no files to remove.""" + # Arrange + # Let's assume the file list is empty, meaning there are no files to remove + file_list: List[Path] = [] + + # Act + remove_file_list(file_list) + + # Assert + # No interaction with the filesystem should occur + assert not mock_path.unlink.called + + +def test_remove_file_list_remove_files_successfully(mock_path): + """Test remove_file_list when unlink is successful.""" + # Arrange + # Let's assume we have three files in the file list + file_list = [mock_path, mock_path, mock_path] + + # Mock the unlink method to avoid actual filesystem interaction + patch.object(mock_path, "unlink") + + # Act + remove_file_list(file_list) + + # Assert + # unlink should have been called three times since there are three files in the list + assert mock_path.unlink.call_count == 3 + + +def test_remove_file_list_ignore_permission_error(mock_path): + """Test remove_file_list when unlink raises a PermissionError.""" + # Arrange + # Let's assume we have three files in the file list + file_list = [mock_path, mock_path, mock_path] + + # Mock the unlink method to raise a PermissionError + patch.object(mock_path, "unlink", side_effect=PermissionError) + + # Act + remove_file_list(file_list) + + # Assert + # unlink should have been called three times since there are three files in the list + assert mock_path.unlink.call_count == 3 + + +def test_remove_file_list_other_exception(mock_path): + """Test remove_file_list when unlink raises an exception other than PermissionError.""" + # Arrange + # Let's assume we have three files in the file list + file_list = [mock_path, mock_path, mock_path] + + # Mock the unlink method to raise an exception other than PermissionError + patch.object(mock_path, "unlink", side_effect=OSError) + + # Act + remove_file_list(file_list) + + # Assert + assert 
"""OpenBB Platform Core tests."""

import uuid
from pathlib import Path
from unittest.mock import patch

import pytest
from openbb_core.app.logs.utils.utils import get_app_id, get_log_dir, get_session_id

## get_session_id


def test_get_session_id_return_type():
    """Test if the returned value is a string."""
    session_id = get_session_id()
    assert isinstance(session_id, str)


def test_get_session_id_format():
    """Test if the returned string has the format "UUID-current_time"."""
    session_id = get_session_id()

    parts = session_id.split("-")

    # A canonical UUID contains four dashes, so "UUID-time" splits into six parts.
    assert len(parts) == 6

    uuid_part = "-".join(parts[0:5])
    time_part = str(parts[5])

    # The first five parts re-join into a valid UUID.
    assert uuid.UUID(uuid_part)

    # The trailing part is the numeric timestamp.
    assert int(time_part)


def test_get_session_id_uniqueness():
    """Test if subsequent calls return different session IDs."""
    session_id1 = get_session_id()
    session_id2 = get_session_id()
    assert session_id1 != session_id2


## get_app_id


def test_get_app_id_success():
    """Test that get_app_id extracts the app id stem from the log dir path."""
    # Mock get_log_dir to simulate a successful scenario.
    with patch("openbb_core.app.logs.utils.utils.get_log_dir") as mock_get_log_dir:
        # NOTE(review): the original had a bare `mock_get_log_dir` expression
        # here (suppressed with `pylint: disable=pointless-statement`) — it
        # did nothing and has been removed.
        mock_get_log_dir.return_value = Path(
            "/path/to/contextual_user_data_directory/app_id.log"
        )
        app_id = get_app_id("/path/to/contextual_user_data_directory")
        assert app_id == "app_id"


def test_get_app_id_os_error():
    """Test that OSError with errno 30 (read-only file system) propagates."""
    with patch("openbb_core.app.logs.utils.utils.get_log_dir") as mock_get_log_dir:
        mock_get_log_dir.side_effect = OSError(30, "Read-only file system")
        with pytest.raises(OSError):
            get_app_id("/path/to/contextual_user_data_directory")


def test_get_app_id_other_exception():
    """Test that any other exception from get_log_dir propagates unchanged."""
    with patch("openbb_core.app.logs.utils.utils.get_log_dir") as mock_get_log_dir:
        mock_get_log_dir.side_effect = Exception("Some other error")
        with pytest.raises(Exception, match="Some other error"):
            get_app_id("/path/to/contextual_user_data_directory")


## get_log_dir


def test_get_log_dir():
    """Test that get_log_dir composes the helper results into the final path."""
    with patch(
        "openbb_core.app.logs.utils.utils.create_log_dir_if_not_exists",
        return_value="/test_dir",
    ) as mock_create_log_dir, patch(
        "openbb_core.app.logs.utils.utils.create_log_uuid_if_not_exists",
        return_value="12345",
    ) as mock_create_log_uuid, patch(
        "openbb_core.app.logs.utils.utils.create_uuid_dir_if_not_exists",
        return_value="/test_dir/12345",
    ) as mock_create_uuid_dir:
        # Call the get_log_dir function
        result = get_log_dir("contextual_user_data_directory")

        # Assertions
        assert result == "/test_dir/12345"
        mock_create_log_dir.assert_called_once_with("contextual_user_data_directory")
        mock_create_log_uuid.assert_called_once_with("/test_dir")
        mock_create_uuid_dir.assert_called_once_with("/test_dir", "12345")
"""Tests for the Results model."""

from openbb_core.app.model.abstract.results import Results
from pydantic import BaseModel


class MockResults(Results):
    """Minimal concrete Results subclass used as a test double."""


def test_results_model():
    """Instantiating a Results subclass yields a pydantic BaseModel."""
    instance = MockResults()

    assert isinstance(instance, BaseModel)


"""Tests for the SingletonMeta metaclass."""

from openbb_core.app.model.abstract.singleton import SingletonMeta


class MyClass(metaclass=SingletonMeta):
    """A simple class backed by SingletonMeta."""

    def __init__(self, value):
        """Store the given value."""
        self.value = value


def test_singleton_instance_creation():
    """Repeated construction hands back the first instance unchanged."""
    first = MyClass(42)
    second = MyClass(100)

    # Both names refer to the one-and-only instance; the second
    # constructor call did not overwrite the original value.
    assert first is second
    assert first.value == second.value == 42


def test_singleton_multiple_classes():
    """Distinct classes under SingletonMeta keep separate singletons."""

    class AnotherClass(metaclass=SingletonMeta):
        def __init__(self, data):
            self.data = data

    my_instance = MyClass(42)
    other_instance = AnotherClass("test")

    # Each class owns its own singleton; they never alias each other.
    assert my_instance is not other_instance
    assert my_instance.value == 42
    assert other_instance.data == "test"


"""Test the Tagged model."""

from openbb_core.app.model.abstract.tagged import Tagged


def test_tagged_model():
    """A Tagged instance exposes an `id` attribute."""
    instance = Tagged()

    assert hasattr(instance, "id")


def test_fields():
    """The Tagged model declares an `id` field."""
    assert "id" in Tagged.model_fields.keys()


"""Test the Warnings model."""

from unittest.mock import Mock

import pytest
from openbb_core.app.model.abstract.warning import Warning_, cast_warning


@pytest.mark.parametrize(
    "category, message",
    [
        ("test", "test"),
        ("test2", "test2"),
    ],
)
def test_warn_model(category, message):
    """Warning_ stores the category and message it is given."""
    warning = Warning_(category=category, message=message)

    assert warning.category == category
    assert warning.message == message


def test_fields():
    """The Warning_ model declares its expected fields."""
    fields = Warning_.model_fields
fields_keys = fields.keys() + + assert "category" in fields_keys + assert "message" in fields_keys + + +def test_cast_warning(): + """Test the cast_warning function.""" + mock_warning_message = Mock() + mock_warning_message.category.__name__ = "test" + mock_warning_message.message = "test" + warning = cast_warning(mock_warning_message) + + assert warning.category == "test" + assert warning.message == "test" diff --git a/openbb_platform/core/tests/app/model/charts/__init__.py b/openbb_platform/core/tests/app/model/charts/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..55cb40ff9e8e017663d6bec31b52bd5fbe993077 --- /dev/null +++ b/openbb_platform/core/tests/app/model/charts/__init__.py @@ -0,0 +1 @@ +"""OpenBB Platform Core tests.""" diff --git a/openbb_platform/core/tests/app/model/charts/test_chart.py b/openbb_platform/core/tests/app/model/charts/test_chart.py new file mode 100644 index 0000000000000000000000000000000000000000..0d64ba2e35341d8df8dba1f710a18150cc0f6240 --- /dev/null +++ b/openbb_platform/core/tests/app/model/charts/test_chart.py @@ -0,0 +1,58 @@ +"""Test the chart model.""" + +import pytest +from openbb_core.app.model.charts.chart import Chart + + +def test_charting_default_values(): + """Test the charting default values.""" + # Arrange & Act + chart = Chart() + + # Assert + assert chart.content is None + assert chart.format is None + + +def test_charting_custom_values(): + """Test the charting custom values.""" + # Arrange + content = {"data": [1, 2, 3]} + chart_format = "plotly" + + # Act + chart = Chart(content=content, format=chart_format) + + # Assert + assert chart.content == content + assert chart.format == chart_format + + +def test_charting_assignment_validation(): + """Test the charting assignment validation.""" + # Arrange + chart = Chart() + + # Act & Assert + with pytest.raises(ValueError): + chart.invalid_field = "Invalid Value" + + +def test_charting_config_validation(): + """Test the charting config 
validation.""" + # Arrange + content = {"data": [1, 2, 3]} + chart_format = "plotly" + + chart = Chart(content=content, format=chart_format) + + with pytest.raises(ValueError): + chart.content = "Invalid Content" # type: ignore[assignment] + + assert chart.content == content + assert chart.format == chart_format + + +def test_show(): + """Test the show method.""" + # TODO : add test after the function is properly refactored diff --git a/openbb_platform/core/tests/app/model/hub/__init__.py b/openbb_platform/core/tests/app/model/hub/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..55cb40ff9e8e017663d6bec31b52bd5fbe993077 --- /dev/null +++ b/openbb_platform/core/tests/app/model/hub/__init__.py @@ -0,0 +1 @@ +"""OpenBB Platform Core tests.""" diff --git a/openbb_platform/core/tests/app/model/hub/test_hub_session.py b/openbb_platform/core/tests/app/model/hub/test_hub_session.py new file mode 100644 index 0000000000000000000000000000000000000000..19638f22cb90abdf82a02f89369d07b96c401dd7 --- /dev/null +++ b/openbb_platform/core/tests/app/model/hub/test_hub_session.py @@ -0,0 +1,37 @@ +"""Test the HubSession class.""" + +from openbb_core.app.model.hub.hub_session import HubSession +from pydantic import SecretStr + +# ruff: noqa: S105 S106 + + +def test_hub_session(): + """Test the HubSession class.""" + session = HubSession( + access_token=SecretStr("mock_access_token"), + token_type="mock_token_type", + email="mock_email", + user_uuid="mock_user_uuid", + username="mock_username", + primary_usage="mock_primary_usage", + ) + assert session.access_token.get_secret_value() == "mock_access_token" + assert session.token_type == "mock_token_type" + assert session.email == "mock_email" + assert session.user_uuid == "mock_user_uuid" + assert session.username == "mock_username" + assert session.primary_usage == "mock_primary_usage" + + +def test_fields(): + """Test the HubSession fields.""" + fields = HubSession.model_fields + fields_keys = 
fields.keys() + + assert "access_token" in fields_keys + assert "token_type" in fields_keys + assert "email" in fields_keys + assert "user_uuid" in fields_keys + assert "username" in fields_keys + assert "primary_usage" in fields_keys diff --git a/openbb_platform/core/tests/app/model/hub/test_hub_user_settings.py b/openbb_platform/core/tests/app/model/hub/test_hub_user_settings.py new file mode 100644 index 0000000000000000000000000000000000000000..5fdcde411fae58d3912b16ed46dec0c3a9d1b571 --- /dev/null +++ b/openbb_platform/core/tests/app/model/hub/test_hub_user_settings.py @@ -0,0 +1,10 @@ +"""Test the HubUserSettings class.""" + +from openbb_core.app.model.hub.hub_user_settings import HubUserSettings + + +def test_hub_user_settings(): + """Test the HubUserSettings class.""" + hub_settings = HubUserSettings() + + assert isinstance(hub_settings, HubUserSettings) diff --git a/openbb_platform/core/tests/app/model/test_command_context.py b/openbb_platform/core/tests/app/model/test_command_context.py new file mode 100644 index 0000000000000000000000000000000000000000..57d982f58c2a3a4e9b1b620705473eb8f9b5929c --- /dev/null +++ b/openbb_platform/core/tests/app/model/test_command_context.py @@ -0,0 +1,24 @@ +"""Test the CommandContext model.""" + +from openbb_core.app.model.command_context import ( + CommandContext, + SystemSettings, + UserSettings, +) + + +def test_command_context(): + """Test the CommandContext model.""" + cc = CommandContext() + assert isinstance(cc, CommandContext) + assert isinstance(cc.user_settings, UserSettings) + assert isinstance(cc.system_settings, SystemSettings) + + +def test_fields(): + """Test the CommandContext fields.""" + fields = CommandContext.model_fields + fields_keys = fields.keys() + + assert "user_settings" in fields_keys + assert "system_settings" in fields_keys diff --git a/openbb_platform/core/tests/app/model/test_credentials.py b/openbb_platform/core/tests/app/model/test_credentials.py new file mode 100644 index 
0000000000000000000000000000000000000000..52f0d25126c02fac348b9cecd2ec8ab50d0a90a4 --- /dev/null +++ b/openbb_platform/core/tests/app/model/test_credentials.py @@ -0,0 +1,27 @@ +"""Test the Credentials model.""" + +import typing +from unittest.mock import patch + + +def test_credentials(): + """Test the Credentials model.""" + with patch( + target="openbb_core.app.model.credentials.ProviderInterface" + ) as mock_provider_interface: + mock_provider_interface.credentials = { + "benzinga_api_key": (typing.Optional[str], None), + "polygon_api_key": (typing.Optional[str], None), + } + from openbb_core.app.model.credentials import ( # pylint: disable=import-outside-toplevel + Credentials, + ) + + creds = Credentials( + benzinga_api_key="mock_benzinga_api_key", + polygon_api_key="mock_polygon_api_key", + ) + + assert isinstance(creds, Credentials) + assert creds.benzinga_api_key.get_secret_value() == "mock_benzinga_api_key" + assert creds.polygon_api_key.get_secret_value() == "mock_polygon_api_key" diff --git a/openbb_platform/core/tests/app/model/test_defaults.py b/openbb_platform/core/tests/app/model/test_defaults.py new file mode 100644 index 0000000000000000000000000000000000000000..a64e318d5dcdd2f4dcabfa3f01394d35e05ecb90 --- /dev/null +++ b/openbb_platform/core/tests/app/model/test_defaults.py @@ -0,0 +1,15 @@ +"""Test the Defaults class.""" + +from openbb_core.app.model.defaults import Defaults + + +def test_defaults(): + """Test the Defaults class.""" + cc = Defaults(commands={"/equity/price": {"provider": "test"}}) + assert isinstance(cc, Defaults) + assert cc.commands == {"equity.price": {"provider": ["test"]}} + + +def test_fields(): + """Test the Defaults fields.""" + assert "commands" in Defaults.model_fields diff --git a/openbb_platform/core/tests/app/model/test_metadata.py b/openbb_platform/core/tests/app/model/test_metadata.py new file mode 100644 index 0000000000000000000000000000000000000000..37bf7377984d78106ae5237071ae66a70a7cb895 --- /dev/null +++ 
b/openbb_platform/core/tests/app/model/test_metadata.py @@ -0,0 +1,144 @@ +"""Test the Metadata model.""" + +from datetime import datetime + +import numpy as np +import pandas as pd +import pytest +from openbb_core.app.model.metadata import Metadata +from openbb_core.provider.abstract.data import Data + + +def test_Metadata(): + """Run Smoke test.""" + m = Metadata( + arguments={ + "provider_choices": {}, + "standard_params": {}, + "extra_params": {}, + }, + route="test", + timestamp=datetime.now(), + duration=0, + ) + assert m + assert isinstance(m, Metadata) + + +def test_fields(): + """Run Smoke test.""" + fields = Metadata.model_fields.keys() + assert "arguments" in fields + assert "duration" in fields + assert "route" in fields + assert "timestamp" in fields + + +@pytest.mark.parametrize( + "input_data, expected_output", + [ + # Test cases for various input types + ({"data": Data()}, {"data": {"type": "Data", "columns": []}}), + ( + {"data": Data(open=123, close=456)}, + {"data": {"type": "Data", "columns": ["open", "close"]}}, + ), + ( + {"data_list": [Data(open=123, close=456), Data(volume=789)]}, + { + "data_list": { + "type": "List[Data]", + "columns": ["open", "volume", "close"], + } + }, + ), + ( + {"data_list": [Data(open=123, close=456), Data(open=321, volume=789)]}, + { + "data_list": { + "type": "List[Data]", + "columns": ["open", "close", "volume"], + } + }, + ), + ( + {"data_frame": pd.DataFrame({"A": [1, 2], "B": [3, 4]})}, + {"data_frame": {"type": "DataFrame", "columns": ["A", "B"]}}, + ), + ( + { + "data_frame_list": [ + pd.DataFrame({"A": [1, 2], "B": [3, 4]}), + pd.DataFrame({"C": [5, 6]}), + ], + "data_series_list": [ + pd.Series([1, 2], name="X"), + pd.Series([3, 4], name="Y"), + ], + }, + { + "data_frame_list": { + "type": "List[DataFrame]", + "columns": [["A", "B"], ["C"]], + }, + "data_series_list": {"type": "List[Series]", "columns": ["X", "Y"]}, + }, + ), + ( + { + "numpy_array": np.array( + [(1, "Alice"), (2, "Bob")], dtype=[("id", 
int), ("name", "U10")] + ) + }, + {"numpy_array": {"type": "ndarray", "columns": ["id", "name"]}}, + ), + # Test case for long string input + ( + { + "long_string": "This is a very long string that exceeds 80 characters in length and should be trimmed." + }, + { + "long_string": "This is a very long string that exceeds 80 characters in length and should be tr" + }, + ), + ], +) +def test_scale_arguments(input_data, expected_output): + """Test the scale_arguments method.""" + kwargs = { + "provider_choices": {}, + "standard_params": {}, + "extra_params": input_data, + } + m = Metadata( + arguments=kwargs, + route="test", + timestamp=datetime.now(), + duration=0, + ) + arguments = m.arguments + + for arg in arguments: # pylint: disable=E1133 + if "columns" in arguments[arg]: + # compare the column names disregarding the order with the expected output + assert sorted(arguments["extra_params"][arg]["columns"]) == sorted( + expected_output[arg]["columns"] + ) + assert arguments[arg]["type"] == expected_output[arg]["type"] + else: + # assert m.arguments["extra_params"] == expected_output + keys = list(arguments["extra_params"].keys()) + expected_keys = list(expected_output.keys()) + assert sorted(keys) == sorted(expected_keys) + + for key in keys: + if "type" in arguments["extra_params"][key]: + assert ( + arguments["extra_params"][key]["type"] + == expected_output[key]["type"] + ) + assert sorted(arguments["extra_params"][key]["columns"]) == sorted( + expected_output[key]["columns"] + ) + else: + assert arguments["extra_params"][key] == expected_output[key] diff --git a/openbb_platform/core/tests/app/model/test_obbject.py b/openbb_platform/core/tests/app/model/test_obbject.py new file mode 100644 index 0000000000000000000000000000000000000000..a793080d88de5944696a0d858d1e0d50d2ccac36 --- /dev/null +++ b/openbb_platform/core/tests/app/model/test_obbject.py @@ -0,0 +1,392 @@ +"""Tests for the OBBject class.""" + +from unittest.mock import MagicMock + +import pandas as pd 
+import pytest +from openbb_core.app.model.obbject import Chart, OBBject, OpenBBError +from openbb_core.app.utils import basemodel_to_df +from openbb_core.provider.abstract.data import Data +from pandas.testing import assert_frame_equal + + +def test_OBBject(): + """Smoke test.""" + co: OBBject = OBBject() + assert isinstance(co, OBBject) + + +def test_fields(): + """Smoke test.""" + fields = OBBject.model_fields.keys() + + assert "results" in fields + assert "provider" in fields + assert "warnings" in fields + assert "chart" in fields + assert "extra" in fields + + +def test_to_dataframe_no_results(): + """Test helper.""" + co: OBBject = OBBject() + with pytest.raises(Exception): + co.to_dataframe() + + +class MockData(Data): + """Test helper.""" + + x: int + y: int + + +class MockMultiData(Data): + """Test helper.""" + + date: str + another_date: str + value: float + + +class MockDataFrame(Data): + """Test helper.""" + + date: str + value: float + + +@pytest.mark.parametrize( + "results, expected_df", + [ + # Test case 1: Normal results with "date" column + ( + [{"date": "2023-07-30", "value": 10}, {"date": "2023-07-31", "value": 20}], + pd.DataFrame( + [ + {"date": "2023-07-30", "value": 10}, + {"date": "2023-07-31", "value": 20}, + ], + ), + ), + # Test case 2: Normal results without "date" column + ( + [{"value": 10}, {"value": 20}], + pd.DataFrame({"value": [10, 20]}, index=pd.RangeIndex(start=0, stop=2)), + ), + # Test case 3: List of Data + ( + [ + MockData(x=0, y=2), + MockData(x=1, y=3), + MockData(x=2, y=0), + MockData(x=3, y=1), + MockData(x=4, y=6), + ], + pd.DataFrame( + {"x": [0, 1, 2, 3, 4], "y": [2, 3, 0, 1, 6]}, columns=["x", "y"] + ), + ), + # Test case 4: List of dict + ( + [ + {"a": 1, "y": 2}, + {"a": 1, "y": 3}, + {"a": 2, "y": 0}, + {"a": 3, "y": 1}, + {"a": 4, "y": 6}, + ], + pd.DataFrame( + {"a": [1, 1, 2, 3, 4], "y": [2, 3, 0, 1, 6]}, columns=["a", "y"] + ), + ), + # Test case 5: List of Lists + ( + [[0, 1], [1, 3], [2, 0], [3, 1], [4, 
6]], + pd.DataFrame([[0, 1], [1, 3], [2, 0], [3, 1], [4, 6]]), + ), + # Test case 6: List of Tuples + ( + [(3, 2), (1, 3), (2, 0), (3, 1), (4, 6)], + pd.DataFrame([(3, 2), (1, 3), (2, 0), (3, 1), (4, 6)]), + ), + # Test case 7: List of Strings + ( + ["YOLO2", "YOLO3", "YOLO0", "YOLO1", "YOLO6"], + pd.DataFrame(["YOLO2", "YOLO3", "YOLO0", "YOLO1", "YOLO6"]), + ), + # Test case 7: List of Numbers + ( + [1, 0.42, 12321298, 129387129387192837, 0.000000123], + pd.DataFrame([1, 0.42, 12321298, 129387129387192837, 0.000000123]), + ), + # Test case 7: Dict of Dicts + ( + { + "0": {"x": 0, "y": 2}, + "1": {"x": 1, "y": 3}, + "2": {"x": 2, "y": 0}, + "3": {"x": 3, "y": 1}, + "4": {"x": 4, "y": 6}, + }, + pd.DataFrame.from_dict( + { + "0": {"x": 0, "y": 2}, + "1": {"x": 1, "y": 3}, + "2": {"x": 2, "y": 0}, + "3": {"x": 3, "y": 1}, + "4": {"x": 4, "y": 6}, + }, + orient="index", + ), + ), + # Test case 8: Dict of Lists + ( + {"0": [0, 2], "1": [1, 3], "2": [2, 0], "3": [3, 1], "4": [4, 6]}, + pd.DataFrame.from_dict( + {"0": [0, 2], "1": [1, 3], "2": [2, 0], "3": [3, 1], "4": [4, 6]}, + orient="index", + ), + ), + # Test case 9: List of dict of data + ( + [ + { + "df1": [ + MockMultiData( + date="1956-01-01", another_date="2023-09-01", value=0.0 + ), + MockMultiData( + date="1956-02-01", another_date="2023-09-01", value=0.0 + ), + MockMultiData( + date="1956-03-01", another_date="2023-09-01", value=0.0 + ), + ], + "df2": [ + MockMultiData( + date="1955-03-01", another_date="2023-09-01", value=0.0 + ), + MockMultiData( + date="1955-04-01", another_date="2023-09-01", value=0.0 + ), + MockMultiData( + date="1955-05-01", another_date="2023-09-01", value=0.0 + ), + ], + } + ], + pd.concat( + { + "df1": pd.DataFrame( + { + "date": [ + pd.to_datetime("1956-01-01").date(), + pd.to_datetime("1956-02-01").date(), + pd.to_datetime("1956-03-01").date(), + ], + "another_date": ["2023-09-01", "2023-09-01", "2023-09-01"], + "value": [0.0, 0.0, 0.0], + }, + columns=["date", "another_date", 
"value"], + ), + "df2": pd.DataFrame( + { + "date": [ + pd.to_datetime("1955-03-01").date(), + pd.to_datetime("1955-04-01").date(), + pd.to_datetime("1955-05-01").date(), + ], + "another_date": ["2023-09-01", "2023-09-01", "2023-09-01"], + "value": [0.0, 0.0, 0.0], + }, + columns=["date", "another_date", "value"], + ), + }, + axis=1, + sort=True, + ), + ), + # Test case 10: Empty results + ([], OpenBBError("Results not found.")), + # Test case 11: Results as None, should raise OpenBBError + (None, OpenBBError("Results not found.")), + ], +) +def test_to_dataframe(results, expected_df): + """Test helper.""" + # Arrange + co: OBBject = OBBject(results=results) + + # Act and Assert + if isinstance(expected_df, pd.DataFrame): + result = co.to_dataframe(index=None) + assert_frame_equal(result, expected_df) + else: + with pytest.raises(expected_df.__class__) as exc_info: + co.to_dataframe(index=None) + + assert str(exc_info.value) == str(expected_df) + + +@pytest.mark.parametrize( + "results, index, sort_by", + [ + # Test case 1: Normal results with "date" column + ( + [{"date": "2023-07-30", "value": 10}, {"date": "2023-07-31", "value": 20}], + "date", + "value", + ), + # Test case 2: List of Data + ( + [ + MockData(x=0, y=2), + MockData(x=1, y=3), + MockData(x=2, y=0), + MockData(x=3, y=1), + MockData(x=4, y=6), + ], + "x", + "y", + ), + ], +) +def test_to_dataframe_w_args(results, index, sort_by): + """Test helper.""" + # Arrange + co: OBBject = OBBject(results=results) + + # Act and Assert + result = co.to_dataframe(index=index, sort_by=sort_by) + assert isinstance(result, pd.DataFrame) + assert result.index.name == index + + # check if dataframe is properly sorted + assert result[sort_by].is_monotonic_increasing + + +@pytest.mark.parametrize( + "results", + # Test case 1: List of models with daylight savings crossover. 
+ ( + [ + MockDataFrame(date="2023-11-03 00:00:00-04:00", value=10), + MockDataFrame(date="2023-11-03 08:00:00-04:00", value=9), + MockDataFrame(date="2023-11-03 16:00:00-04:00", value=8), + MockDataFrame(date="2023-11-06 00:00:00-05:00", value=11), + MockDataFrame(date="2023-11-06 08:00:00-05:00", value=7), + MockDataFrame(date="2023-11-06 16:00:00-05:00", value=12), + ], + ), +) +def test_to_df_daylight_savings(results): + """Test helper.""" + # Arrange + co: OBBject = OBBject(results=results) + + # Act and Assert + expected_df = basemodel_to_df(results, index="date") + result = co.to_dataframe(index="date") + assert isinstance(result, pd.DataFrame) + assert_frame_equal(expected_df, result) + + +@pytest.mark.parametrize( + "results, expected_dict", + [ # Case 1: Normal results with "date" column + ( + [{"date": "2023-07-30", "value": 10}, {"date": "2023-07-31", "value": 20}], + {"date": ["2023-07-30", "2023-07-31"], "value": [10, 20]}, + ), + # Case 2: Normal results without "date" column + ( + [{"value": 10}, {"value": 20}], + {"value": [10, 20]}, + ), + # Test case 3: Dict of lists + ( + {"0": [0, 2], "1": [1, 3], "2": [2, 0], "3": [3, 1], "4": [4, 6]}, + {0: [0, 1, 2, 3, 4], 1: [2, 3, 0, 1, 6]}, + ), + # Test case 4: No results + ([], OpenBBError("Results not found.")), + # Test case 5: Results as None, should raise OpenBBError + (None, OpenBBError("Results not found.")), + # Test case 6: List of tuples + ( + [(3, 2), (1, 3), (2, 0), (3, 1), (4, 6)], + {0: [3, 1, 2, 3, 4], 1: [2, 3, 0, 1, 6]}, + ), + # Test case 7: List of Strings + ( + ["YOLO2", "YOLO3", "YOLO0", "YOLO1", "YOLO6"], + {0: ["YOLO2", "YOLO3", "YOLO0", "YOLO1", "YOLO6"]}, + ), + # Test case 8: List of Numbers + ( + [1, 0.42, 12321, 1293, 0.00123], + {0: [1, 0.42, 12321, 1293, 0.00123]}, + ), + # Test case 9: Dict of Dicts + ( + { + "0": {"x": 0, "y": 2}, + "1": {"x": 1, "y": 3}, + "2": {"x": 2, "y": 0}, + "3": {"x": 3, "y": 1}, + "4": {"x": 4, "y": 6}, + }, + {"0": [0, 2], "1": [1, 3], "2": [2, 
0], "3": [3, 1], "4": [4, 6]}, + ), + ], +) +def test_to_dict(results, expected_dict): + """Test helper.""" + # Arrange + co: OBBject = OBBject(results=results) + + # Act and Assert + if isinstance(expected_dict, (list, dict)): + result = co.to_dict() + assert result == expected_dict + else: + with pytest.raises(expected_dict.__class__) as exc_info: + co.to_dict() + + assert str(exc_info.value) == str(expected_dict) + + +def test_show_chart_exists(): + """Test helper.""" + mock_instance: OBBject = OBBject() + # Arrange + mock_instance.chart = MagicMock(spec=Chart) + mock_instance.chart.fig = MagicMock() + mock_instance.chart.fig.show.return_value = MagicMock() + + # Act + mock_instance.show() + + # Assert + mock_instance.chart.fig.show.assert_called_once() # pylint: disable=no-member + + +def test_show_chart_no_chart(): + """Test helper.""" + mock_instance: OBBject = OBBject() + + # Act and Assert + with pytest.raises(OpenBBError, match="Chart not found."): + mock_instance.show() + + +def test_show_chart_no_fig(): + """Test helper.""" + mock_instance: OBBject = OBBject() + # Arrange + mock_instance.chart = Chart() + + # Act and Assert + with pytest.raises(OpenBBError, match="Chart not found."): + mock_instance.show() diff --git a/openbb_platform/core/tests/app/model/test_preferences.py b/openbb_platform/core/tests/app/model/test_preferences.py new file mode 100644 index 0000000000000000000000000000000000000000..dacfc708ed911035b3eae0f2b9ea0d659a8e485a --- /dev/null +++ b/openbb_platform/core/tests/app/model/test_preferences.py @@ -0,0 +1,9 @@ +"""Test the preferences class.""" + +from openbb_core.app.model.preferences import Preferences + + +def test_preferences(): + """Test the preferences class.""" + preferences = Preferences() + assert isinstance(preferences, Preferences) diff --git a/openbb_platform/core/tests/app/model/test_profile.py b/openbb_platform/core/tests/app/model/test_profile.py new file mode 100644 index 
0000000000000000000000000000000000000000..7f8a4795327f810efb0dd5f365d488616b9065ed --- /dev/null +++ b/openbb_platform/core/tests/app/model/test_profile.py @@ -0,0 +1,9 @@ +"""Test the profile class.""" + +from openbb_core.app.model.profile import Profile + + +def test_preferences(): + """Test the preferences class.""" + preferences = Profile() + assert isinstance(preferences, Profile) diff --git a/openbb_platform/core/tests/app/model/test_system_settings.py b/openbb_platform/core/tests/app/model/test_system_settings.py new file mode 100644 index 0000000000000000000000000000000000000000..1ead135bfe4338d096cc62a0f5646b6f58c85e6b --- /dev/null +++ b/openbb_platform/core/tests/app/model/test_system_settings.py @@ -0,0 +1,174 @@ +"""Tests for the SystemSettings class.""" + +import os +from pathlib import Path + +import pytest +from openbb_core.app.model.system_settings import SystemSettings +from pydantic import BaseModel, ConfigDict + + +class MockSystemSettings(BaseModel): + """Mock SystemSettings.""" + + model_config = ConfigDict(extra="allow", populate_by_name=True) + + +def test_system_settings(): + """Test the SystemSettings class.""" + sys = SystemSettings() + assert isinstance(sys, SystemSettings) + + +def test_create_openbb_directory_directory_and_files_not_exist(tmpdir): + """Test the create_openbb_directory method.""" + # Arrange + values = MockSystemSettings( + **{ + "openbb_directory": str(tmpdir.join("openbb")), + "user_settings_path": str(tmpdir.join("user_settings.json")), + "system_settings_path": str(tmpdir.join("system_settings.json")), + } + ) + + # Act + SystemSettings.create_openbb_directory(values) # type: ignore[operator] + + # Assert + assert os.path.exists(values.openbb_directory) # type: ignore[attr-defined] + assert os.path.exists(values.user_settings_path) # type: ignore[attr-defined] + assert os.path.exists(values.system_settings_path) # type: ignore[attr-defined] + + +def 
test_create_openbb_directory_directory_exists_user_settings_missing(tmpdir): + """Test the create_openbb_directory method.""" + # Arrange + values = MockSystemSettings( + **{ + "openbb_directory": str(tmpdir.join("openbb")), + "user_settings_path": str(tmpdir.join("user_settings.json")), + "system_settings_path": str(tmpdir.join("system_settings.json")), + } + ) + + # Create the openbb directory + Path(values.openbb_directory).mkdir(parents=True, exist_ok=True) # type: ignore[attr-defined] + + # Act + SystemSettings.create_openbb_directory(values) # type: ignore[operator] + + # Assert + assert os.path.exists(values.openbb_directory) # type: ignore[attr-defined] + assert os.path.exists(values.user_settings_path) # type: ignore[attr-defined] + assert os.path.exists(values.system_settings_path) # type: ignore[attr-defined] + + +def test_create_openbb_directory_directory_exists_system_settings_missing(tmpdir): + """Test the create_openbb_directory method.""" + # Arrange + values = MockSystemSettings( + **{ + "openbb_directory": str(tmpdir.join("openbb")), + "user_settings_path": str(tmpdir.join("user_settings.json")), + "system_settings_path": str(tmpdir.join("system_settings.json")), + } + ) + + # Create the openbb directory + Path(values.openbb_directory).mkdir(parents=True, exist_ok=True) # type: ignore[attr-defined] + + # Create the user_settings.json file + with open(values.user_settings_path, "w") as f: # type: ignore[attr-defined] + f.write("{}") + + # Act + SystemSettings.create_openbb_directory(values) # type: ignore[operator] + + # Assert + assert os.path.exists(values.openbb_directory) # type: ignore[attr-defined] + assert os.path.exists(values.user_settings_path) # type: ignore[attr-defined] + assert os.path.exists(values.system_settings_path) # type: ignore[attr-defined] + + +@pytest.mark.parametrize( + "values, expected_handlers", + [ + # Test case: test_mode is True, logging_suppress is True + ( + { + "debug_mode": True, + "test_mode": True, + 
"logging_suppress": True, + "log_collect": True, + "logging_handlers": [], + }, + [], + ), + # Test case: test_mode is False, logging_suppress is True + ( + { + "debug_mode": False, + "test_mode": False, + "logging_suppress": True, + "log_collect": True, + "logging_handlers": [], + }, + [], + ), + # Test case: test_mode is False, logging_suppress is False, log_collect is True, + # and "posthog" handler is not present in logging_handlers + ( + { + "debug_mode": False, + "test_mode": False, + "logging_suppress": False, + "log_collect": True, + "logging_handlers": ["file", "console"], + }, + ["file", "console", "posthog"], + ), + # Test case: test_mode is False, logging_suppress is False, log_collect is True, + # and "posthog" handler is already present in logging_handlers + ( + { + "debug_mode": False, + "test_mode": False, + "logging_suppress": False, + "log_collect": True, + "logging_handlers": ["file", "console", "posthog"], + }, + ["file", "console", "posthog"], + ), + ], +) +def test_validate_posthog_handler(values, expected_handlers): + """Test the validate_posthog_handler method.""" + values = MockSystemSettings(**values) + # Act + result = SystemSettings.validate_posthog_handler(values) # type: ignore[operator] + + # Assert + assert result.logging_handlers == expected_handlers + + +@pytest.mark.parametrize( + "handlers, valid", + [ + # Test case: Valid handlers + (["stdout", "file", "noop"], True), + # Test case: Invalid handler + (["stdout", "invalid_handler", "file"], False), + # Test case: Empty list of handlers + ([], True), + # Test case: Repeated valid handlers + (["stdout", "stderr", "stdout", "noop", "stderr"], True), + ], +) +def test_validate_logging_handlers(handlers, valid): + """Test the validate_logging_handlers method.""" + # Act and Assert + if valid: + assert SystemSettings.validate_logging_handlers(handlers) == handlers # type: ignore[call-arg] + else: + with pytest.raises(ValueError, match="Invalid logging handler"): + 
SystemSettings.validate_logging_handlers(handlers) # type: ignore[call-arg] diff --git a/openbb_platform/core/tests/app/model/test_user_settings.py b/openbb_platform/core/tests/app/model/test_user_settings.py new file mode 100644 index 0000000000000000000000000000000000000000..ebc78faf4d99c16836ed9df4859e7c33d773b70d --- /dev/null +++ b/openbb_platform/core/tests/app/model/test_user_settings.py @@ -0,0 +1,18 @@ +"""Test the UserSettings model.""" + +from openbb_core.app.model.credentials import Credentials +from openbb_core.app.model.defaults import Defaults +from openbb_core.app.model.preferences import Preferences +from openbb_core.app.model.profile import Profile +from openbb_core.app.model.user_settings import UserSettings + + +def test_user_settings(): + """Test the UserSettings model.""" + settings = UserSettings( + credentials=Credentials(), + profile=Profile(), + preferences=Preferences(), + defaults=Defaults(), + ) + assert isinstance(settings, UserSettings) diff --git a/openbb_platform/core/tests/app/results/__init__.py b/openbb_platform/core/tests/app/results/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..55cb40ff9e8e017663d6bec31b52bd5fbe993077 --- /dev/null +++ b/openbb_platform/core/tests/app/results/__init__.py @@ -0,0 +1 @@ +"""OpenBB Platform Core tests.""" diff --git a/openbb_platform/core/tests/app/results/test_empty.py b/openbb_platform/core/tests/app/results/test_empty.py new file mode 100644 index 0000000000000000000000000000000000000000..5ce63f4a5c50e56f72b4b157dca69b16b6842a3c --- /dev/null +++ b/openbb_platform/core/tests/app/results/test_empty.py @@ -0,0 +1,12 @@ +"""Test the Empty model.""" + +from openbb_core.app.model.results.empty import Empty +from pydantic import BaseModel + + +def test_empty_model(): + """Test the Empty model.""" + empty = Empty() + + assert isinstance(empty, Empty) + assert isinstance(empty, BaseModel) diff --git a/openbb_platform/core/tests/app/service/__init__.py 
b/openbb_platform/core/tests/app/service/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..55cb40ff9e8e017663d6bec31b52bd5fbe993077 --- /dev/null +++ b/openbb_platform/core/tests/app/service/__init__.py @@ -0,0 +1 @@ +"""OpenBB Platform Core tests.""" diff --git a/openbb_platform/core/tests/app/service/test_hub_service.py b/openbb_platform/core/tests/app/service/test_hub_service.py new file mode 100644 index 0000000000000000000000000000000000000000..77f6479dad2cec419caf156df74b577eacbe34f3 --- /dev/null +++ b/openbb_platform/core/tests/app/service/test_hub_service.py @@ -0,0 +1,375 @@ +"""Test the hub_service.py module.""" + +# pylint: disable=W0212 +# ruff: noqa: S105 S106 + + +from pathlib import Path +from time import time +from unittest.mock import MagicMock, patch + +import pytest +from jwt import encode +from openbb_core.app.model.defaults import Defaults +from openbb_core.app.service.hub_service import ( + Credentials, + HubService, + HubSession, + HubUserSettings, + OpenBBError, +) +from pydantic import SecretStr + + +@pytest.fixture +def mocker(): + """Fixture for mocker.""" + with patch("openbb_core.app.service.hub_service.HubService") as mock: + yield mock + + +def test_v3tov4_map(): + """Test v3 to v4 map.""" + + v3_keys = { + "databento": "API_DATABENTO_KEY", + "alpha_vantage": "API_KEY_ALPHAVANTAGE", + "fmp": "API_KEY_FINANCIALMODELINGPREP", + "nasdaq": "API_KEY_QUANDL", + "polygon": "API_POLYGON_KEY", + "fred": "API_FRED_KEY", + "news_api": "API_NEWS_TOKEN", + "biztoc": "API_BIZTOC_TOKEN", + "cmc": "API_CMC_KEY", + "finnhub": "API_FINNHUB_KEY", + "whale_alert": "API_WHALE_ALERT_KEY", + "glassnode": "API_GLASSNODE_KEY", + "coinglass": "API_COINGLASS_KEY", + "ethplorer": "API_ETHPLORER_KEY", + "cryptopanic": "API_CRYPTO_PANIC_KEY", + "crypto_panic": "API_CRYPTO_PANIC_KEY", # If dev choses to use this name + "bitquery": "API_BITQUERY_KEY", + "smartstake": ["API_SMARTSTAKE_KEY", "API_SMARTSTAKE_TOKEN"], + "messari": 
"API_MESSARI_KEY", + "shroom": "API_SHROOM_KEY", + "santiment": "API_SANTIMENT_KEY", + "eodhd": "API_EODHD_KEY", + "tokenterminal": "API_TOKEN_TERMINAL_KEY", + "token_terminal": "API_TOKEN_TERMINAL_KEY", # If dev choses to use this name + "intrinio": "API_INTRINIO_KEY", + "github": "API_GITHUB_KEY", + "reddit": [ + "API_REDDIT_CLIENT_ID", + "API_REDDIT_CLIENT_SECRET", + "API_REDDIT_USERNAME", + "API_REDDIT_USER_AGENT", + "API_REDDIT_PASSWORD", + ], + "companies_house": "API_COMPANIESHOUSE_KEY", + "companieshouse": "API_COMPANIESHOUSE_KEY", # If dev choses to use this name + "dappradar": "API_DAPPRADAR_KEY", + "nixtla": "API_KEY_NIXTLA", + } + + providers = sorted( + [ + p.stem + for p in Path("openbb_platform", "providers").glob("*") + if p.is_dir() and p.name not in ("__pycache__", "tests") + ] + ) + + for provider in providers: + if provider in v3_keys: + keys = v3_keys[provider] + keys_list = keys if isinstance(keys, list) else [keys] + for k in keys_list: + assert k.lower() in HubService.V3TOV4 + + +def test_connect_with_email_password(): + """Test connect with email and password.""" + mock_hub_session = MagicMock(spec=HubSession) + with patch( + "requests.post", return_value=MagicMock(status_code=200, json=lambda: {}) + ), patch.object( + HubService, + "_get_session_from_email_password", + return_value=mock_hub_session, + ): + hub_service = HubService() + result = hub_service.connect(email="test@example.com", password="password") + + assert result == mock_hub_session + assert hub_service.session == mock_hub_session + + +def test_connect_with_sdk_token(): + """Test connect with Platform personal access token.""" + mock_hub_session = MagicMock(spec=HubSession) + with patch( + "requests.post", return_value=MagicMock(status_code=200, json=lambda: {}) + ), patch.object( + HubService, "_get_session_from_platform_token", return_value=mock_hub_session + ): + hub_service = HubService() + result = hub_service.connect(pat="pat") + + assert result == mock_hub_session + 
assert hub_service.session == mock_hub_session + + +def test_connect_without_credentials(): + """Test connect without credentials.""" + hub_service = HubService() + with pytest.raises( + OpenBBError, match="Please provide 'email' and 'password' or 'pat'" + ): + hub_service.connect() + + +def test_get_session_from_email_password(): + """Test get session from email and password.""" + mock_hub_session = MagicMock(spec=HubSession) + with patch( + "requests.post", + return_value=MagicMock( + status_code=200, + json=lambda: { + "access_token": "token", + "token_type": "Bearer", + "uuid": "uuid", + "email": "email", + "username": "username", + "primary_usage": "primary_usage", + }, + ), + ), patch.object( + HubService, + "_get_session_from_email_password", + return_value=mock_hub_session, + ): + hub_service = HubService() + result = hub_service._get_session_from_email_password("email", "password") + assert isinstance(result, HubSession) + + +def test_get_session_from_platform_token(): + """Test get session from Platform personal access token.""" + mock_hub_session = MagicMock(spec=HubSession) + with patch( + "requests.post", + return_value=MagicMock( + status_code=200, + json=lambda: { + "access_token": "token", + "token_type": "Bearer", + "uuid": "uuid", + "username": "username", + "email": "email", + "primary_usage": "primary_usage", + }, + ), + ), patch.object( + HubService, + "_get_session_from_platform_token", + return_value=mock_hub_session, + ): + mock_token = ( + "eyJ0eXAiOiJKV1QiLCJhbGciOiJFUzI1NiIsImtpZCI6ImRiMjEyZDdhZj" + "c2MWI0ZTNlOGNjZGM3OWQ5Zjk4YWM5In0.eyJhY2Nlc3NfdG9rZW4iOiJ0" + "b2tlbiIsInRva2VuX3R5cGUiOiJCZWFyZXIiLCJ1dWlkIjoidXVpZCIsInV" + "zZXJuYW1lIjoidXNlcm5hbWUiLCJlbWFpbCI6ImVtYWlsIiwicHJpbWFyeV9" + "1c2FnZSI6InByaW1hcnlfdXNhZ2UifQ.FAtE8-a1a-313Zoa6dREIxGZOHaW9" + "-JLZnFzyJ6dlHBZnkjQT2tfaaefxnTdAlSmToQwxGykvuatmI7L0wztPQ" + ) + + result = HubService()._get_session_from_platform_token(mock_token) + assert isinstance(result, HubSession) + + +def 
test_disconnect(): + """Test disconnect.""" + with patch( + "requests.get", + return_value=MagicMock( + status_code=200, + json=lambda: {"success": True}, + ), + ), patch.object( + HubService, + "_post_logout", + return_value=True, + ): + mock_hub_session = MagicMock( + spec=HubSession, access_token=SecretStr("token"), token_type="Bearer" + ) + hub_service = HubService(mock_hub_session) + + assert hub_service.disconnect() is True + assert hub_service.session is None + + +def test_get_user_settings(): + """Test get user settings.""" + with patch( + "requests.get", + return_value=MagicMock( + status_code=200, + json=lambda: {}, + ), + ), patch.object( + HubService, + "_get_user_settings", + return_value=MagicMock(spec=HubUserSettings), + ): + mock_hub_session = MagicMock( + spec=HubSession, access_token=SecretStr("token"), token_type="Bearer" + ) + hub_service = HubService(mock_hub_session) + user_settings = hub_service._get_user_settings() + assert isinstance(user_settings, HubUserSettings) + + +def test_put_user_settings(): + """Test put user settings.""" + + with patch( + "requests.put", + return_value=MagicMock( + status_code=200, + ), + ), patch.object( + HubService, + "_put_user_settings", + return_value=True, + ): + mock_hub_session = MagicMock( + spec=HubSession, access_token=SecretStr("token"), token_type="Bearer" + ) + mock_user_settings = MagicMock(spec=HubUserSettings) + + hub_service = HubService(mock_hub_session) + assert ( + hub_service._put_user_settings(mock_hub_session, mock_user_settings) is True + ) + + +def test_hub2platform_v4_only(): + """Test hub2platform.""" + mock_user_settings = MagicMock(spec=HubUserSettings) + mock_user_settings.features_keys = { + "fmp_api_key": "abc", + "polygon_api_key": "def", + "fred_api_key": "ghi", + } + mock_user_settings.features_settings = {} + + credentials, _ = HubService().hub2platform(mock_user_settings) + assert isinstance(credentials, Credentials) + assert credentials.fmp_api_key.get_secret_value() == 
"abc" + assert credentials.polygon_api_key.get_secret_value() == "def" + assert credentials.fred_api_key.get_secret_value() == "ghi" + + +def test_hub2platform_v3_only(): + """Test hub2platform.""" + mock_user_settings = MagicMock(spec=HubUserSettings) + mock_user_settings.features_keys = { + "api_key_financialmodelingprep": "abc", + "api_polygon_key": "def", + "api_fred_key": "ghi", + } + mock_user_settings.features_settings = {} + + credentials, _ = HubService().hub2platform(mock_user_settings) + assert isinstance(credentials, Credentials) + assert credentials.fmp_api_key.get_secret_value() == "abc" + assert credentials.polygon_api_key.get_secret_value() == "def" + assert credentials.fred_api_key.get_secret_value() == "ghi" + + +def test_hub2platform_v3v4(): + """Test hub2platform.""" + mock_user_settings = MagicMock(spec=HubUserSettings) + mock_user_settings.features_keys = { + "api_key_financialmodelingprep": "abc", + "fmp_api_key": "other_key", + "api_polygon_key": "def", + "api_fred_key": "ghi", + } + mock_user_settings.features_settings = {} + + credentials, _ = HubService().hub2platform(mock_user_settings) + assert isinstance(credentials, Credentials) + assert credentials.fmp_api_key.get_secret_value() == "other_key" + assert credentials.polygon_api_key.get_secret_value() == "def" + assert credentials.fred_api_key.get_secret_value() == "ghi" + + +def test_platform2hub(): + """Test platform2hub.""" + mock_user_settings = MagicMock(spec=HubUserSettings) + mock_user_settings.features_keys = { # Received from Hub + "api_key_financialmodelingprep": "abc", + "fmp_api_key": "other_key", + "api_fred_key": "ghi", + } + mock_user_settings.features_settings = {} + mock_hub_service = HubService() + mock_hub_service._hub_user_settings = mock_user_settings + mock_credentials = Credentials( # Current credentials + fmp_api_key=SecretStr("fmp"), + polygon_api_key=SecretStr("polygon"), + fred_api_key=SecretStr("fred"), + benzinga_api_key=SecretStr("benzinga"), + 
some_api_key=SecretStr("some"), + ) + mock_defaults = Defaults() + user_settings = mock_hub_service.platform2hub(mock_credentials, mock_defaults) + + assert isinstance(user_settings, HubUserSettings) + assert user_settings.features_keys["api_key_financialmodelingprep"] == "fmp" + assert user_settings.features_keys["fmp_api_key"] == "other_key" + assert user_settings.features_keys["polygon_api_key"] == "polygon" + assert user_settings.features_keys["api_fred_key"] == "fred" + assert user_settings.features_keys["benzinga_api_key"] == "benzinga" + assert "some_api_key" in user_settings.features_keys + assert "defaults" in user_settings.features_settings + + +@pytest.mark.parametrize( + "offset, message", + [ + # valid + ( + 100, + None, + ), + # expired + ( + 0, + "Platform personal access token expired.", + ), + # invalid + (None, "Failed to decode Platform token."), + ], +) +def test__check_token_expiration(offset, message): + """Test check token expiration function.""" + + token = ( + encode( + {"some": "payload", "exp": int(time()) + offset}, + "secret", + algorithm="HS256", + ) + if offset is not None + else "invalid_token" + ) + + if message: + with pytest.raises(OpenBBError, match=message): + HubService._check_token_expiration(token) + else: + HubService._check_token_expiration(token) diff --git a/openbb_platform/core/tests/app/service/test_system_service.py b/openbb_platform/core/tests/app/service/test_system_service.py new file mode 100644 index 0000000000000000000000000000000000000000..a563a1a37711ad8cdc2e17bf9b256e49c86878da --- /dev/null +++ b/openbb_platform/core/tests/app/service/test_system_service.py @@ -0,0 +1,57 @@ +"""Test the system_service.py module.""" + +# pylint: disable=redefined-outer-name + +import pytest +from openbb_core.app.service.system_service import SystemService + + +@pytest.fixture +def system_service(): + """Fixture for system service.""" + return SystemService() + + +def test_system_service_init(system_service): + """Test system 
service init.""" + assert system_service + + +def test_read_from_file(system_service): + """Test read default system settings.""" + # pylint: disable=protected-access + system_settings = system_service._read_from_file() + + assert system_settings + + +def test_write_to_file(system_service): + """Test write default system settings.""" + # pylint: disable=protected-access + system_settings = system_service._read_from_file() + system_service.write_to_file(system_settings=system_settings) + + assert system_service + + +def test_system_settings(system_service): + """Test system settings.""" + system_settings = system_service.system_settings + + assert system_settings + + +def test_system_settings_setter(system_service): + """Test system settings setter.""" + system_settings = system_service.system_settings + + system_service.system_settings = system_settings + + assert system_service.system_settings == system_settings + + +def test_refresh_system_settings(system_service): + """Test refresh system settings.""" + system_settings = system_service.refresh_system_settings() + + assert system_settings diff --git a/openbb_platform/core/tests/app/service/test_user_service.py b/openbb_platform/core/tests/app/service/test_user_service.py new file mode 100644 index 0000000000000000000000000000000000000000..f00ac2a347fd3125d24e95840201c0b7248fd5a9 --- /dev/null +++ b/openbb_platform/core/tests/app/service/test_user_service.py @@ -0,0 +1,61 @@ +"""Test the user_service.py module.""" + +import json +import tempfile +from pathlib import Path + +from openbb_core.app.service.user_service import ( + UserService, + UserSettings, +) + + +def test_read_from_file_file_exists(): + """Test read default user settings.""" + result = UserService.read_from_file(path=Path("some_path")) + + assert result + assert isinstance(result, UserSettings) + + +def test_write_to_file(): + """Test write default user settings.""" + # Create a temporary file for this test + with 
tempfile.NamedTemporaryFile(delete=False) as temp_file: + temp_path = Path(temp_file.name) + + # Create a UserSettings object with some test data + user_settings = UserSettings() + user_settings.credentials = {"username": "test"} # type: ignore[assignment] + user_settings.preferences = {"theme": "dark"} # type: ignore[assignment] + user_settings.defaults = {"language": "en"} # type: ignore[assignment] + + # Write the user settings to the temporary file + UserService.write_to_file(user_settings, temp_path) + + # Read the file and verify its contents + with open(temp_path, encoding="utf-8") as file: + data = json.load(file) + assert data == { + "credentials": {"username": "test"}, + "preferences": {"theme": "dark"}, + "defaults": {"language": "en"}, + } + + # Clean up the temporary file + temp_path.unlink() + + +def test_merge_dicts(): + """Test merge dicts.""" + result = UserService._merge_dicts( # pylint: disable=protected-access + list_of_dicts=[ + {"a": 1, "b": 2}, + {"a": 3, "b": 4}, + ] + ) + + assert result + assert isinstance(result, dict) + assert result["a"] == 3 + assert result["b"] == 4 diff --git a/openbb_platform/core/tests/app/static/__init__.py b/openbb_platform/core/tests/app/static/__init__.py new file mode 100644 index 0000000000000000000000000000000000000000..3f4c8aabddaf03f769d25babcef1c521d87642b1 --- /dev/null +++ b/openbb_platform/core/tests/app/static/__init__.py @@ -0,0 +1 @@ +"""OpenBB Platform core tests.""" diff --git a/openbb_platform/core/tests/app/static/test_app_factory.py b/openbb_platform/core/tests/app/static/test_app_factory.py new file mode 100644 index 0000000000000000000000000000000000000000..9a8799be5038d9d1f494e7c7f87561468f19e1f3 --- /dev/null +++ b/openbb_platform/core/tests/app/static/test_app_factory.py @@ -0,0 +1,56 @@ +"""Test static app factory.""" + +# pylint: disable=redefined-outer-name + +import pytest +from openbb_core.app.model.system_settings import SystemSettings +from openbb_core.app.model.user_settings import 
UserSettings +from openbb_core.app.static.account import Account +from openbb_core.app.static.app_factory import create_app +from openbb_core.app.static.coverage import Coverage + + +@pytest.fixture(scope="module") +def app_factory(): + """Return app factory.""" + return create_app() + + +def test_app_factory_init(app_factory): + """Test app factory init.""" + assert app_factory + + +def test_app_system_settings(app_factory): + """Test app system settings.""" + system_settings = app_factory.system + assert system_settings + assert isinstance(system_settings, SystemSettings) + + +def test_app_user_settings(app_factory): + """Test app user settings.""" + user_settings = app_factory.user + assert user_settings + assert isinstance(user_settings, UserSettings) + + +def test_app_account(app_factory): + """Test app account.""" + account = app_factory.account + assert account + assert isinstance(account, Account) + + +def test_app_coverage(app_factory): + """Test app coverage.""" + coverage = app_factory.coverage + assert coverage + assert isinstance(coverage, Coverage) + + +def test_app_reference(app_factory): + """Test app reference.""" + reference = app_factory.reference + assert reference + assert isinstance(reference, dict) diff --git a/openbb_platform/core/tests/app/static/test_container.py b/openbb_platform/core/tests/app/static/test_container.py new file mode 100644 index 0000000000000000000000000000000000000000..2e128432b5dba96ad7215d91c63ebb51f82bad81 --- /dev/null +++ b/openbb_platform/core/tests/app/static/test_container.py @@ -0,0 +1,107 @@ +"""Test the container.py file.""" + +from re import escape +from typing import Optional +from unittest.mock import patch + +import pytest +from openbb_core.app.command_runner import CommandRunner +from openbb_core.app.model.abstract.error import OpenBBError +from openbb_core.app.model.defaults import Defaults +from openbb_core.app.model.user_settings import UserSettings +from openbb_core.app.static.container import 
Container +from pydantic import BaseModel, SecretStr + +# pylint: disable=redefined-outer-name,protected-access + + +@pytest.fixture(scope="module") +def container(): + """Set up test container class.""" + + class MockCredentials(BaseModel): + provider_1_api_key: Optional[SecretStr] = None + provider_2_api_key: Optional[SecretStr] = "test_key" + + MockCredentials.origins = { + "provider_1": ["provider_1_api_key"], + "provider_2": ["provider_2_api_key"], + "provider_3": [], + } + + mock_user_settings = UserSettings() + mock_user_settings.credentials = MockCredentials() + mock_user_settings.defaults = Defaults( + commands={ + "/test/command": {"provider": "provider_1"}, + "test.first_wins.command": {"provider": ["provider_1", "provider_2"]}, + "test.not_available.command": {"provider": ["x", "y", "z"]}, + } + ) + return Container(CommandRunner(user_settings=mock_user_settings)) + + +def test_container_init(container): + """Test container init.""" + assert container + + +@patch("openbb_core.app.command_runner.CommandRunner.sync_run") +def test_container__run(mock_sync_run, container): + """Test container _run method.""" + container._run() + mock_sync_run.assert_called_once() + + +def test_container__check_credentials(container): + """Test container _check_credentials method.""" + assert container._check_credentials("provider_1") is False + assert container._check_credentials("provider_2") is True + assert container._check_credentials("provider_3") is True + + +@pytest.mark.parametrize( + "choice, command, default_priority, expected, error_msg", + [ + # Provider set in args + ("fmp", ..., ..., "fmp", None), + # Provider not set in args or config, fallback to provider without keys + ( + None, + "test.no_config.command", + ("provider_1", "provider_3"), + "provider_3", + None, + ), + # Provider priority set in config, first with key wins + ( + None, + "test.first_wins.command", + ("provider_1", "provider_2", "provider_3"), + "provider_2", + None, + ), + # Provider 
priority set in config, with providers not available for the command + ( + None, + "test.not_available.command", + ("provider_1", "provider_2"), + OpenBBError, + escape( + "Provider fallback failed." + "\n[Providers]\n * 'x' -> not installed, please install openbb-x\n * 'y' -> not installed," + " please install openbb-y\n * 'z' -> not installed, please install openbb-z" + ), + ), + ], +) +def test_container__get_provider( + choice, command, default_priority, expected, error_msg, container +): + """Test container _get_provider method.""" + if expected is OpenBBError: + with pytest.raises(expected, match=error_msg): + container._get_provider(choice, command, default_priority) + else: + result = container._get_provider(choice, command, default_priority) + assert result == expected diff --git a/openbb_platform/core/tests/app/static/test_coverage.py b/openbb_platform/core/tests/app/static/test_coverage.py new file mode 100644 index 0000000000000000000000000000000000000000..0b70cd4b6198bbbd1f12eee0d07b6343f2d8163b --- /dev/null +++ b/openbb_platform/core/tests/app/static/test_coverage.py @@ -0,0 +1,46 @@ +"""Test the coverage.py file.""" + +# pylint: disable=redefined-outer-name + +import pytest +from openbb_core.app.command_runner import CommandRunner +from openbb_core.app.static.app_factory import BaseApp +from openbb_core.app.static.coverage import Coverage + + +@pytest.fixture(scope="module") +def app(): + """Return a BaseApp instance.""" + return BaseApp(command_runner=CommandRunner()) + + +@pytest.fixture(scope="module") +def coverage(app): + """Return coverage.""" + return Coverage(app) # Pass the BaseApp instance to Coverage + + +def test_coverage_init(coverage): + """Test coverage init.""" + assert coverage + + +def test_coverage_providers(coverage): + """Test coverage providers.""" + provider_coverage = coverage.providers + assert provider_coverage + assert isinstance(provider_coverage, dict) + + +def test_coverage_commands(coverage): + """Test coverage 
commands.""" + command_coverage = coverage.commands + assert command_coverage + assert isinstance(command_coverage, dict) + + +def test_coverage_reference(coverage): + """Test coverage reference.""" + reference = coverage.reference + assert reference + assert isinstance(reference, dict) diff --git a/openbb_platform/core/tests/app/static/test_filters.py b/openbb_platform/core/tests/app/static/test_filters.py new file mode 100644 index 0000000000000000000000000000000000000000..9532867fe4b1bab8e58ae0850772e07d05d19192 --- /dev/null +++ b/openbb_platform/core/tests/app/static/test_filters.py @@ -0,0 +1,69 @@ +"""Test the filters.py file.""" + +import numpy as np +import pandas as pd +import pytest +from openbb_core.app.static.utils.filters import filter_inputs +from openbb_core.provider.abstract.data import Data + + +def test_filter_inputs_not_df(): + """Test filter_inputs.""" + kwargs = {"num": 1} + kwargs = filter_inputs(False, None, **kwargs) + + assert kwargs["num"] == 1 + + +def test_filter_inputs_df(): + """Test filter_inputs.""" + df = pd.DataFrame({"a": [1, 2, 3], "b": [4, 5, 6]}) + kwargs = {"data": df} + kwargs = filter_inputs(True, None, **kwargs) + + assert isinstance(kwargs["data"], list) + + +# Example instances of each supported type for testing +example_dict = {"a": 1, "b": 2} +example_list = [{"a": 1, "b": 2}, {"a": 3, "b": 4}] +example_series = pd.Series([1, 2, 3]) +example_dataframe = pd.DataFrame({"col1": [1, 2], "col2": [3, 4]}) +example_ndarray = np.array([[1, 2], [3, 4]]) +example_data_list_series = [pd.Series([4, 5, 6])] +example_data_list_df = [pd.DataFrame({"col3": [5, 6], "col4": [7, 8]})] + +# Create a list of scenarios to test +test_data = [ + example_dict, + example_list, + example_series, + example_dataframe, + example_ndarray, + example_data_list_series, + example_data_list_df, +] + + +@pytest.mark.parametrize("input_data", test_data) +def test_filter_inputs( + input_data, +): + """Test the filter_inputs function.""" + result = 
filter_inputs(data=input_data, data_processing=True) + + # Assert that the result is a dictionary + assert isinstance(result, dict), "filter_inputs should return a dictionary" + + # Assert that the 'data' key is present in the result + assert "data" in result, "Resulting dictionary should have a 'data' key" + + # Assert that the type of 'data' in the result is the expected type + if isinstance(result["data"], list): + assert isinstance( + result["data"][0], Data + ), f"The 'data' key should be a list of {Data.__name__}" + else: + assert isinstance( + result["data"], Data + ), f"The 'data' key should be of type {Data.__name__}" diff --git a/openbb_platform/core/tests/app/static/test_package_builder.py b/openbb_platform/core/tests/app/static/test_package_builder.py new file mode 100644 index 0000000000000000000000000000000000000000..cc8eb801bb01ef95b1b9282f848c39dbd09ae24d --- /dev/null +++ b/openbb_platform/core/tests/app/static/test_package_builder.py @@ -0,0 +1,743 @@ +"""Test the package_builder.py file.""" + +# pylint: disable=redefined-outer-name, protected-access + +from dataclasses import dataclass +from inspect import _empty +from pathlib import Path +from typing import Any, Dict, List, Tuple, Union +from unittest.mock import PropertyMock, mock_open, patch + +import pandas +import pytest +from importlib_metadata import EntryPoint, EntryPoints +from openbb_core.app.static.package_builder import ( + ClassDefinition, + DocstringGenerator, + ImportDefinition, + MethodDefinition, + ModuleBuilder, + PackageBuilder, + Parameter, + PathHandler, +) +from openbb_core.env import Env +from pydantic import Field +from typing_extensions import Annotated + + +@pytest.fixture(scope="module") +def tmp_openbb_dir(tmp_path_factory): + """Return a temporary openbb directory.""" + return tmp_path_factory.mktemp("openbb") + + +@pytest.fixture(scope="module") +def package_builder(tmp_openbb_dir): + """Return package builder.""" + return PackageBuilder(tmp_openbb_dir) + + +def 
test_package_builder_init(package_builder): + """Test package builder init.""" + assert package_builder + + +def test_package_builder_build(package_builder): + """Test package builder build.""" + package_builder.build() + + +def test_save_modules(package_builder): + """Test save module.""" + package_builder._save_modules() + + +def test_save_package(package_builder): + """Test save package.""" + package_builder._save_package() + + +def test_run_linters(package_builder): + """Test run linters.""" + package_builder._run_linters() + + +def test_write(package_builder): + """Test save to package.""" + package_builder._write(code="", name="test", extension="json") + + +@pytest.fixture(scope="module") +def module_builder(): + """Return module builder.""" + return ModuleBuilder() + + +def test_module_builder_init(module_builder): + """Test module builder init.""" + assert module_builder + + +@pytest.fixture(scope="module") +def class_definition(): + """Return class definition.""" + return ClassDefinition() + + +def test_class_definition_init(class_definition): + """Test class definition init.""" + assert class_definition + + +def test_build(class_definition): + """Test build.""" + code = class_definition.build("openbb_core.app.static.container.Container") + assert code + + +@pytest.fixture(scope="module") +def method_definition(): + """Return method definition.""" + return MethodDefinition() + + +def test_method_definition_init(method_definition): + """Test method definition init.""" + assert method_definition + + +def test_build_class_loader_method(method_definition): + """Test build class loader method.""" + code = method_definition.build_class_loader_method( + "openbb_core.app.static.container.Container" + ) + assert code + + +def test_get_type(method_definition): + """Test get type.""" + type_ = method_definition.get_type(field=Parameter.empty) + assert type_ + assert isinstance(type_, type) + + +def test_get_type_hint(method_definition): + """Test get type hint.""" + 
+ class TestField: + annotation = int + + field = TestField() + result = method_definition.get_type(field) + assert result is int + + +def test_field_with_type_attribute_missing_type(method_definition): + """Test field with type attribute missing type.""" + + class TestField: + annotation = Parameter.empty + + field = TestField() + result = method_definition.get_type(field) + assert result is _empty + + +def test_get_default(method_definition): + """Test get default.""" + + class TestField: + default = Field(default=42) + + field = TestField() + result = method_definition.get_default(field) + assert result == 42 + + +def test_get_default_none(method_definition): + """Test get default.""" + + class TestField: + default = None + + field = TestField() + result = method_definition.get_default(field) + assert result is None + + +def test_get_default_default_value(method_definition): + """Test get default default value.""" + + class TestField: + default = type(Ellipsis)() + + field = TestField() + result = method_definition.get_default(field) + assert result is None + + +def test_get_default_no_default(method_definition): + """Test get default no default.""" + + class TestField: + pass + + field = TestField() + result = method_definition.get_default(field) + assert result == _empty + + +def test_is_annotated_dc(method_definition): + """Test is annotated dc.""" + result = method_definition.is_annotated_dc(annotation=Parameter.empty) + assert not result + + +def test_is_annotated_dc_annotated(method_definition): + """Test is annotated dc annotated.""" + + @dataclass + class TestAnnotatedDataClass: + """Test annotated data class.""" + + value: int + + annotated_dataclass = Annotated[TestAnnotatedDataClass, "test_annotation"] + result = method_definition.is_annotated_dc(annotation=annotated_dataclass) + assert result + + +@pytest.mark.parametrize( + "params, var_kw, expected", + [ + ( + { + "provider": Parameter.empty, + "extra_params": Parameter.empty, + "param1": 
Parameter.empty, + "param2": Parameter.empty, + }, + None, + ["extra_params", "param1", "param2", "provider"], + ), + ( + { + "param1": Parameter.empty, + "provider": Parameter.empty, + "extra_params": Parameter.empty, + "param2": Parameter.empty, + }, + ["extra_params"], + ["param1", "param2", "provider", "extra_params"], + ), + ( + { + "param2": Parameter.empty, + "any_kwargs": Parameter.empty, + "provider": Parameter.empty, + "param1": Parameter.empty, + }, + ["any_kwargs"], + ["param2", "param1", "provider", "any_kwargs"], + ), + ( + { + "any_kwargs": Parameter.empty, + "extra_params": Parameter.empty, + "provider": Parameter.empty, + "param1": Parameter.empty, + "param2": Parameter.empty, + }, + ["any_kwargs", "extra_params"], + ["param1", "param2", "provider", "any_kwargs", "extra_params"], + ), + ], +) +def test_reorder_params(method_definition, params, var_kw, expected): + """Test reorder params, ensure var_kw are last after 'provider'.""" + result = method_definition.reorder_params(params, var_kw) + assert result + assert list(result.keys()) == expected + + +def test_build_func_params(method_definition): + """Test build func params.""" + param_map = { + "param1": Parameter( + name="param1", kind=Parameter.POSITIONAL_OR_KEYWORD, annotation=type(None) + ), + "param2": Parameter( + "param2", kind=Parameter.POSITIONAL_OR_KEYWORD, annotation=int + ), + "param3": Parameter( + "param3", + kind=Parameter.POSITIONAL_OR_KEYWORD, + annotation=pandas.core.frame.DataFrame, + ), + } + + expected_output = ( + "param1: None,\n param2: int,\n param3: pandas.DataFrame" + ) + output = method_definition.build_func_params(param_map) + + assert output == expected_output + + +@pytest.mark.parametrize( + "return_type, expected_output", + [ + (_empty, "None"), + (int, "int"), + ], +) +def test_build_func_returns(method_definition, return_type, expected_output): + """Test build func returns.""" + output = method_definition.build_func_returns(return_type=return_type) + assert output 
== expected_output + + +@patch("openbb_core.app.static.package_builder.MethodDefinition") +def test_build_command_method_signature(mock_method_definitions, method_definition): + """Test build command method signature.""" + mock_method_definitions.is_deprecated_function.return_value = False + formatted_params = { + "param1": Parameter("NoneType", kind=Parameter.POSITIONAL_OR_KEYWORD), + "param2": Parameter("int", kind=Parameter.POSITIONAL_OR_KEYWORD), + } + return_type = int + output = method_definition.build_command_method_signature( + func_name="test_func", + formatted_params=formatted_params, + return_type=return_type, + path="test_path", + ) + assert output + + +@patch("openbb_core.app.static.package_builder.MethodDefinition") +def test_build_command_method_signature_deprecated( + mock_method_definitions, method_definition +): + """Test build command method signature.""" + mock_method_definitions.is_deprecated_function.return_value = True + formatted_params = { + "param1": Parameter("NoneType", kind=Parameter.POSITIONAL_OR_KEYWORD), + "param2": Parameter("int", kind=Parameter.POSITIONAL_OR_KEYWORD), + } + return_type = int + output = method_definition.build_command_method_signature( + func_name="test_func", + formatted_params=formatted_params, + return_type=return_type, + path="test_path", + ) + assert "@deprecated" in output + + +def test_build_command_method_doc(method_definition): + """Test build command method doc.""" + + def some_func(): + """Do some func doc.""" + + formatted_params = { + "param1": Parameter("NoneType", kind=Parameter.POSITIONAL_OR_KEYWORD), + "param2": Parameter("int", kind=Parameter.POSITIONAL_OR_KEYWORD), + } + + output = method_definition.build_command_method_doc( + path="/menu/submenu/command", func=some_func, formatted_params=formatted_params + ) + assert output + assert isinstance(output, str) + + +def test_build_command_method_body(method_definition): + """Test build command method body.""" + + def some_func(): + """Do some func 
doc.""" + return 42 + + with patch( + "openbb_core.app.static.package_builder.MethodDefinition.is_data_processing_function", + return_value=False, + ), patch( + "openbb_core.app.static.package_builder.MethodDefinition.is_deprecated_function", + return_value=False, + ): + output = method_definition.build_command_method_body( + path="openbb_core.app.static.container.Container", func=some_func + ) + + assert output + assert isinstance(output, str) + + +def test_build_command_method(method_definition): + """Test build command method.""" + + def some_func(): + """Do some func doc.""" + return 42 + + with patch( + "openbb_core.app.static.package_builder.MethodDefinition.is_data_processing_function", + return_value=False, + ), patch( + "openbb_core.app.static.package_builder.MethodDefinition.is_deprecated_function", + return_value=False, + ): + output = method_definition.build_command_method( + path="openbb_core.app.static.container.Container", + func=some_func, + model_name=None, + ) + + assert output + assert isinstance(output, str) + + +@pytest.fixture(scope="module") +def import_definition(): + """Return import definition.""" + return ImportDefinition() + + +def test_import_definition_init(import_definition): + """Test import definition init.""" + assert import_definition + + +def test_filter_hint_type_list(import_definition): + """Test filter type hint list.""" + output = import_definition.filter_hint_type_list( + hint_type_list=[int, str, float, bool, _empty, _empty, _empty, _empty] + ) + assert output == [] + + +def test_import_definition_get_path_hint_type_list(import_definition): + """Test import definition get path hint type list.""" + hint_type_list = import_definition.get_path_hint_type_list( + path="openbb_core.app.static.container.Container" + ) + assert hint_type_list == [] + + +def test_import_definition_build(import_definition): + """Test import definition build.""" + code = import_definition.build(path="openbb_core.app.static.container.Container") + 
assert code + + +@pytest.fixture(scope="module") +def path_handler(): + """Return path handler.""" + return PathHandler() + + +def test_path_handler_init(path_handler): + """Test path handler init.""" + assert path_handler + + +@pytest.fixture(scope="module") +def route_map(path_handler): + """Return route map.""" + return path_handler.build_route_map() + + +def test_build_route_map(route_map): + """Test build route map.""" + assert route_map + assert isinstance(route_map, dict) + + +@pytest.fixture(scope="module") +def path_list(path_handler, route_map): + """Return path list.""" + return path_handler.build_path_list(route_map=route_map) + + +def test_build_path_list(path_list): + """Test build path list.""" + assert path_list + assert isinstance(path_list, list) + + +def test_get_route(path_handler, route_map): + """Test get route.""" + route = path_handler.get_route(route_map=route_map, path="/equity/price/historical") + + assert route + + +def test_get_child_path_list(path_handler, path_list): + """Test get child path list.""" + child_path_list = path_handler.get_child_path_list( + path="/equity", path_list=path_list + ) + + assert child_path_list + assert isinstance(child_path_list, list) + + +def test_clean_path(path_handler): + """Test clean path.""" + path = "/equity/price/historical" + result = path_handler.clean_path(path=path) + assert result == "equity_price_historical" + + +def test_build_module_name(path_handler): + """Test build module name.""" + module_name = path_handler.build_module_name(path="") + assert module_name == "__extensions__" + + module_name = path_handler.build_module_name(path="/equity/price/historical") + assert module_name == "equity_price_historical" + + +def test_build_module_class(path_handler): + """Test build module class.""" + module_class = path_handler.build_module_class(path="") + assert module_class == "Extensions" + + module_class = path_handler.build_module_class(path="/equity/price/historical") + assert module_class == 
"ROUTER_equity_price_historical" + + +@pytest.fixture(scope="module") +def docstring_generator(): + """Return package builder.""" + return DocstringGenerator() + + +def test_docstring_generator_init(docstring_generator): + """Test docstring generator init.""" + assert docstring_generator + + +def test_get_OBBject_description(docstring_generator): + """Test build docstring.""" + docstring = docstring_generator.get_OBBject_description( + "SomeModel", "some_provider" + ) + assert docstring + + +def test_generate_model_docstring(docstring_generator): + """Test generate model docstring.""" + docstring = "" + model_name = "WorldNews" + summary = "This is a summary." + sections = ["description", "parameters", "returns", "examples"] + + pi = docstring_generator.provider_interface + kwarg_params = pi.params[model_name]["extra"].__dataclass_fields__ + return_schema = pi.return_schema[model_name] + returns = return_schema.model_fields + + formatted_params = { + "param1": Parameter("NoneType", kind=Parameter.POSITIONAL_OR_KEYWORD), + "param2": Parameter("int", kind=Parameter.POSITIONAL_OR_KEYWORD), + } + explicit_dict = dict(formatted_params) + + docstring = docstring_generator.generate_model_docstring( + model_name=model_name, + summary=summary, + explicit_params=explicit_dict, + kwarg_params=kwarg_params, + returns=returns, + results_type="List[WorldNews]", + sections=sections, + ) + + assert docstring + assert summary in docstring + assert "Parameters" in docstring + assert "Returns" in docstring + assert "WorldNews" in docstring + + +@pytest.mark.parametrize( + "type_, expected", + [ + (Any, []), + (List[str], ["List"]), + (Dict[str, str], ["Dict"]), + (Tuple[str], ["Tuple"]), + (Union[List[str], Dict[str, str], Tuple[str]], ["List", "Dict", "Tuple"]), + ], +) +def test__get_generic_types(docstring_generator, type_, expected): + """Test get generic types.""" + output = docstring_generator._get_generic_types(type_, []) + assert output == expected + + 
+@pytest.mark.parametrize( + "items, model, expected", + [ + ([], "test_model", "test_model"), + (["List"], "test_model", "List[test_model]"), + (["Dict"], "test_model", "Dict[str, test_model]"), + (["Tuple"], "test_model", "Tuple[test_model]"), + ( + ["List", "Dict", "Tuple"], + "test_model", + "Union[List[test_model], Dict[str, test_model], Tuple[test_model]]", + ), + ], +) +def test__get_repr(docstring_generator, items, model, expected): + output = docstring_generator._get_repr(items, model) + assert output == expected + + +def test_generate(docstring_generator): + """Test generate docstring.""" + + def some_func(): + """Define Some func docstring.""" + + formatted_params = { + "param1": Parameter("NoneType", kind=Parameter.POSITIONAL_OR_KEYWORD), + "param2": Parameter("int", kind=Parameter.POSITIONAL_OR_KEYWORD), + } + + doc = docstring_generator.generate( + path="/menu/submenu/command", + func=some_func, + formatted_params=formatted_params, + model_name="WorldNews", + ) + assert doc + assert "Parameters" in doc + assert "Returns" in doc + + +def test__read(package_builder, tmp_openbb_dir): + """Test read.""" + + PATH = "openbb_core.app.static.package_builder." 
+ open_mock = mock_open() + with patch(PATH + "open", open_mock), patch(PATH + "load") as mock_load: + package_builder._read(Path(tmp_openbb_dir / "assets" / "reference.json")) + open_mock.assert_called_once_with( + Path(tmp_openbb_dir / "assets" / "reference.json") + ) + mock_load.assert_called_once() + + +@pytest.mark.parametrize( + "ext_built, ext_installed, ext_inst_version, expected_add, expected_remove", + [ + ( + { + "openbb_core_extension": [ + "ext_1@0.0.0", + "ext_2@0.0.0", + ], + "openbb_provider_extension": [ + "prov_1@0.0.0", + "prov_2@1.1.1", + ], + }, + EntryPoints( + ( + EntryPoint( + name="ext_2", value="...", group="openbb_core_extension" + ), + EntryPoint( + name="prov_2", value="...", group="openbb_provider_extension" + ), + ) + ), + "0.0.0", + {"prov_2@0.0.0"}, + {"ext_1@0.0.0", "prov_1@0.0.0", "prov_2@1.1.1"}, + ), + ( + { + "openbb_core_extension": ["ext_1@9.9.9"], + "openbb_provider_extension": ["prov_2@0.0.0"], + }, + EntryPoints( + ( + EntryPoint( + name="ext_2", value="...", group="openbb_core_extension" + ), + EntryPoint( + name="prov_1", value="...", group="openbb_provider_extension" + ), + ) + ), + "5.5.5", + {"ext_2@5.5.5", "prov_1@5.5.5"}, + {"ext_1@9.9.9", "prov_2@0.0.0"}, + ), + ], +) +def test_package_diff( + package_builder, + ext_built, + ext_installed, + ext_inst_version, + expected_add, + expected_remove, +): + """Test package differences.""" + + def mock_entry_points(group): + """Mock entry points.""" + return ext_installed.select(**{"group": group}) + + PATH = "openbb_core.app.static.package_builder." 
+ with patch(PATH + "entry_points", mock_entry_points), patch.object( + EntryPoint, "dist", new_callable=PropertyMock + ) as mock_obj: + + class MockPathDistribution: + version = ext_inst_version + + mock_obj.return_value = MockPathDistribution() + + add, remove = package_builder._diff(ext_built) + + # We add whatever is not built, but is installed + assert add == expected_add + # We remove whatever is built, but is not installed + assert remove == expected_remove + + +@pytest.mark.parametrize( + "add, remove, openbb_auto_build", + [ + (set(), set(), True), + ({"this"}, set(), True), + (set(), {"that"}, True), + ({"this"}, {"that"}, True), + ({"this"}, {"that"}, False), + ], +) +def test_auto_build(package_builder, add, remove, openbb_auto_build): + """Test auto build.""" + + with patch.object(PackageBuilder, "_diff") as mock_assets_diff, patch.object( + PackageBuilder, "build" + ) as mock_build, patch.object(Env, "AUTO_BUILD", openbb_auto_build): + mock_assets_diff.return_value = add, remove + + package_builder.auto_build() + + if openbb_auto_build: + if add or remove: + mock_build.assert_called_once() + else: + mock_assets_diff.assert_not_called() + mock_build.assert_not_called() diff --git a/openbb_platform/core/tests/app/static/test_reference_loader.py b/openbb_platform/core/tests/app/static/test_reference_loader.py new file mode 100644 index 0000000000000000000000000000000000000000..20ef1fabf9660fd897172d09fb7ef8b54c2015a2 --- /dev/null +++ b/openbb_platform/core/tests/app/static/test_reference_loader.py @@ -0,0 +1,64 @@ +"""Tests for the ReferenceLoader class.""" + +import json +from pathlib import Path + +import pytest +from openbb_core.app.static.reference_loader import ReferenceLoader + +# pylint: disable=W0212, W0621 + + +@pytest.fixture(scope="function") +def reference_loader(): + """Fixture to create a ReferenceLoader instance.""" + ReferenceLoader._instances = {} + yield ReferenceLoader + ReferenceLoader._instances = {} + + +@pytest.fixture +def 
mock_reference_data(tmp_path): + """Fixture to create a mock reference.json file.""" + directory = tmp_path / "assets" + directory.mkdir(parents=True) + reference_file = directory / "reference.json" + mock_data = {"key": "value"} + with open(reference_file, "w") as f: + json.dump(mock_data, f) + return tmp_path + + +def test_load_reference_data(mock_reference_data, reference_loader): + """Test loading of reference data.""" + loader = reference_loader(directory=mock_reference_data) + assert loader.reference == { + "key": "value" + }, "Reference data should match the mock data" + + +def test_default_directory_load(reference_loader): + """Test loading from the default directory.""" + # This test assumes the default directory and reference.json file exist and are correctly set up + loader = reference_loader() + # Perform a basic check to ensure some data is loaded; specifics will depend on the actual reference.json content + assert ( + isinstance(loader.reference, dict) and loader.reference + ), "Should load data from the default directory" + + +def test_missing_reference_file(tmp_path, reference_loader): + """Test behavior when the reference.json file is missing.""" + loader = reference_loader( + directory=tmp_path + ) # tmp_path does not contain a reference.json file + assert ( + loader.reference == {} + ), "Should return an empty dictionary if the reference file is missing" + + +def test_nonexistent_directory(reference_loader): + """Test initialization with a nonexistent directory.""" + assert ( + reference_loader(directory=Path("/nonexistent/path")).reference == {} + ), "Should return an empty dictionary if the directory does not exist" diff --git a/openbb_platform/core/tests/app/static/utils/test_linters.py b/openbb_platform/core/tests/app/static/utils/test_linters.py new file mode 100644 index 0000000000000000000000000000000000000000..1ebd54b66d02385c3752304da8bc379a9b922df2 --- /dev/null +++ b/openbb_platform/core/tests/app/static/utils/test_linters.py @@ -0,0 
+1,45 @@ +"""Test linters.py file.""" + +# pylint: disable=redefined-outer-name + +import pytest +from openbb_core.app.static.package_builder import ( + Linters, +) + + +@pytest.fixture(scope="module") +def tmp_package_dir(tmp_path_factory): + """Return a temporary package directory.""" + return tmp_path_factory.mktemp("package") + + +@pytest.fixture(scope="module") +def linters(tmp_package_dir): + """Return linters.""" + return Linters(tmp_package_dir) + + +def test_linters_init(linters): + """Test linters init.""" + assert linters + + +def test_print_separator(linters): + """Test print separator.""" + linters.print_separator(symbol="AAPL") + + +def test_run(linters): + """Test run.""" + linters.run(linter="ruff") + + +def test_ruff(linters): + """Test ruff.""" + linters.ruff() + + +def test_black(linters): + """Test black.""" + linters.black() diff --git a/openbb_platform/core/tests/app/test_command_runner.py b/openbb_platform/core/tests/app/test_command_runner.py new file mode 100644 index 0000000000000000000000000000000000000000..51fd5f23342b689c2b3e77802ca31bf81ade3489 --- /dev/null +++ b/openbb_platform/core/tests/app/test_command_runner.py @@ -0,0 +1,438 @@ +"""Test command runner.""" + +from dataclasses import dataclass +from inspect import Parameter +from typing import Dict, List +from unittest.mock import Mock, patch + +import pytest +from fastapi import Query +from fastapi.params import Query as QueryParam +from openbb_core.app.command_runner import ( + CommandRunner, + ExecutionContext, + ParametersBuilder, + StaticCommandRunner, +) +from openbb_core.app.model.abstract.warning import OpenBBWarning +from openbb_core.app.model.command_context import CommandContext +from openbb_core.app.model.obbject import OBBject +from openbb_core.app.model.system_settings import SystemSettings +from openbb_core.app.model.user_settings import UserSettings +from openbb_core.app.provider_interface import ExtraParams +from openbb_core.app.router import CommandMap +from 
pydantic import BaseModel, ConfigDict + +# pylint: disable=W0613, W0621, W0102, W0212 + + +class MockAPIRoute: + """MockAPIRoute""" + + def __init__(self, route): + """Initialize the mock API route.""" + self.route = route + self.openapi_extra = {"no_validate": True} + + +class MockExecutionContext: + """MockExecutionContext""" + + _route_map = {"mock/route": "mock_func"} + + def __init__(self, cmd_map, route, sys, user): + """Initialize the mock execution context.""" + self.command_map = cmd_map + self.route = route + self.system_settings = sys + self.user_settings = user + + @property + def api_route(self) -> str: + """Mock API route.""" + return MockAPIRoute(self.route) + + +@pytest.fixture() +def execution_context(): + """Set up execution context.""" + sys = SystemSettings() + user = UserSettings() + cmd_map = CommandMap() + return MockExecutionContext(cmd_map, "mock/route", sys, user) + + +@pytest.fixture() +def mock_func(): + """Set up mock function.""" + + def mock_func( + a: int, b: int, c: float = 10.0, d: int = 5, provider_choices: Dict = {} + ) -> None: + """Mock function.""" + + return mock_func + + +def test_execution_context(): + """Test execution context.""" + sys = SystemSettings() + user = UserSettings() + cmd_map = CommandMap() + ctx = ExecutionContext(cmd_map, "mock/route", sys, user) + + assert isinstance(ctx, ExecutionContext) + assert ctx.system_settings == sys + assert ctx.user_settings == user + assert ctx.command_map == cmd_map + assert ctx.route == "mock/route" + + +def test_parameters_builder(): + """Test parameters builder.""" + assert ParametersBuilder() + + +@pytest.mark.parametrize( + "input_func, expected_annotations", + [ + (lambda x: x, {"x": Parameter(name="x", kind=Parameter.POSITIONAL_OR_KEYWORD)}), + ( + lambda a, b, c=10: a + b + c, + { + "a": Parameter(name="a", kind=Parameter.POSITIONAL_OR_KEYWORD), + "b": Parameter(name="b", kind=Parameter.POSITIONAL_OR_KEYWORD), + "c": Parameter( + name="c", 
kind=Parameter.POSITIONAL_OR_KEYWORD, default=10 + ), + }, + ), + ( + lambda x, y, *, z: x + y + z, + { + "x": Parameter(name="x", kind=Parameter.POSITIONAL_OR_KEYWORD), + "y": Parameter(name="y", kind=Parameter.POSITIONAL_OR_KEYWORD), + "z": Parameter(name="z", kind=Parameter.KEYWORD_ONLY), + }, + ), + ], +) +def test_parameters_builder_get_polished_func(input_func, expected_annotations): + """Test get_polished_func.""" + polished_func = ParametersBuilder.get_polished_func(input_func) + + assert polished_func.__annotations__ == expected_annotations + assert polished_func.__signature__ == input_func.__signature__ # type: ignore[attr-defined] + + +@pytest.mark.parametrize( + "input_func, expected_params", + [ + (lambda x: x, [Parameter("x", Parameter.POSITIONAL_OR_KEYWORD)]), + ( + lambda a, b, c=10: a + b + c, + [ + Parameter("a", Parameter.POSITIONAL_OR_KEYWORD), + Parameter("b", Parameter.POSITIONAL_OR_KEYWORD), + Parameter("c", Parameter.POSITIONAL_OR_KEYWORD, default=10), + ], + ), + ( + lambda x, y, *, z: x + y + z, + [ + Parameter("x", Parameter.POSITIONAL_OR_KEYWORD), + Parameter("y", Parameter.POSITIONAL_OR_KEYWORD), + Parameter("z", Parameter.KEYWORD_ONLY), + ], + ), + ], +) +def test_parameters_builder_get_polished_parameter_list(input_func, expected_params): + """Test get_polished_parameter_list.""" + param_list = ParametersBuilder.get_polished_parameter_list(input_func) + + assert param_list == expected_params + + +@pytest.mark.parametrize( + "input_func, input_args, input_kwargs, expected_result", + [ + (lambda x: x, (5,), {}, {"x": 5}), + (lambda a, b, c=10: a + b + c, (2, 3), {}, {"a": 2, "b": 3, "c": 10}), + (lambda x, y, *, z: x + y + z, (1, 2), {"z": 3}, {"x": 1, "y": 2, "z": 3}), + ], +) +def test_parameters_builder_merge_args_and_kwargs( + input_func, input_args, input_kwargs, expected_result +): + """Test merge_args_and_kwargs.""" + result = ParametersBuilder.merge_args_and_kwargs( + input_func, input_args, input_kwargs + ) + + assert result == 
expected_result + + +@pytest.mark.parametrize( + "kwargs, system_settings, user_settings, expected_result", + [ + ( + {"cc": "existing_cc"}, + SystemSettings(), + UserSettings(), + {"cc": "mock_cc"}, + ), + ], +) +def test_parameters_builder_update_command_context( + kwargs, system_settings, user_settings, expected_result +): + """Test update_command_context.""" + + def other_mock_func( + cc: CommandContext, + a: int, + b: int, + ) -> None: + """Mock function.""" + + result = ParametersBuilder.update_command_context( + other_mock_func, kwargs, system_settings, user_settings + ) + + assert isinstance(result["cc"], CommandContext) + assert result["cc"].system_settings == system_settings + assert result["cc"].user_settings == user_settings + + +def test_parameters_builder_validate_kwargs(mock_func): + """Test validate_kwargs.""" + # TODO: add more test cases with @pytest.mark.parametrize + + result = ParametersBuilder.validate_kwargs( + mock_func, {"a": 1, "b": "2", "c": 3.0, "d": 4} + ) + + assert result == {"a": 1, "b": 2, "c": 3.0, "d": 4, "provider_choices": {}} + + +@pytest.mark.parametrize( + "extra_params, base, expect", + [ + ( + {"exists": ...}, + ExtraParams, + None, + ), + ( + {"inexistent_field": ...}, + ExtraParams, + OpenBBWarning, + ), + ], +) +def test_parameters_builder__warn_kwargs(extra_params, base, expect): + """Test _warn_kwargs.""" + + @dataclass + class SomeModel(base): # type: ignore[misc,valid-type] + """SomeModel""" + + exists: QueryParam = Query(...) 
+ + class Model(BaseModel): + """Model""" + + model_config = ConfigDict(arbitrary_types_allowed=True) + extra_params: SomeModel + + with pytest.warns(expect) as warning_info: + # pylint: disable=protected-access + ParametersBuilder._warn_kwargs(extra_params, Model) + + if not expect: + assert len(warning_info) == 0 + + +def test_parameters_builder_build(mock_func, execution_context): + """Test build.""" + # TODO: add more test cases with @pytest.mark.parametrize + + with patch("openbb_core.app.provider_interface.ProviderInterface") as mock_pi: + mock_pi.available_providers = ["provider1", "provider2"] + + result = ParametersBuilder.build( + args=(1, 2), + kwargs={ + "c": 3, + "d": "4", + "provider_choices": {"provider": "provider1"}, + }, + func=mock_func, + execution_context=execution_context, + ) + + assert result == { + "a": 1, + "b": 2, + "c": 3.0, + "d": 4, + "provider_choices": {"provider": "provider1"}, + } + + +def test_command_runner(): + """Test command runner.""" + assert CommandRunner() + + +def test_command_runner_properties(): + """Test properties.""" + sys = SystemSettings() + user = UserSettings() + cmd_map = CommandMap() + runner = CommandRunner(cmd_map, sys, user) + + assert isinstance(runner, CommandRunner) + assert runner.system_settings == sys + assert runner.user_settings == user + assert runner.command_map == cmd_map + + +@patch("openbb_core.app.command_runner.CommandRunner") +def test_command_runner_run(_): + """Test run.""" + runner = CommandRunner() + + with patch( + "openbb_core.app.command_runner.StaticCommandRunner", + **{"return_value.run": True}, + ): + assert runner.run("mock/route") + + +@pytest.mark.asyncio +@patch("openbb_core.app.router.CommandMap.get_command") +@patch("openbb_core.app.command_runner.StaticCommandRunner._execute_func") +async def test_static_command_runner_run( + mock_execute_func, mock_get_command, execution_context +): + """Test static command runner run.""" + + def other_mock_func(a: int, b: int, c: int, d: 
int) -> List[int]: + """Mock function.""" + return [a, b, c, d] + + class MockOBBject: + """Mock OBBject""" + + def __init__(self, results): + """Initialize the mock object.""" + self.results = results + self.extra = {} + self.extra["metadata"] = {"test": "test"} + self.provider = None + + mock_get_command.return_value = other_mock_func + mock_execute_func.return_value = MockOBBject(results=[1, 2, 3, 4]) + + result = await StaticCommandRunner.run(execution_context, 1, 2, c=3, d=4) + + assert result.results == [1, 2, 3, 4] + assert hasattr(result, "extra") + assert result.extra.get("metadata") is not None + + +@pytest.mark.asyncio +@patch("openbb_core.app.logs.logging_service.LoggingService") +@patch("openbb_core.app.command_runner.ParametersBuilder.build") +@patch("openbb_core.app.command_runner.StaticCommandRunner._command") +@patch("openbb_core.app.command_runner.StaticCommandRunner._chart") +async def test_static_command_runner_execute_func( + mock_chart, + mock_command, + mock_parameters_builder_build, + mock_logging_service, + execution_context, + mock_func, +): + """Test execute_func.""" + + static_command_runner = StaticCommandRunner() + + mock_parameters_builder_build.return_value = { + "a": 1, + "b": 2, + "c": 3.0, + "d": 4, + "provider_choices": {"provider": ["provider1", "provider2"]}, + "chart": True, + } + mock_logging_service.log.return_value = None + mock_command.return_value = OBBject( + results=[1, 2, 3, 4], + provider="mock_provider", + accessors={"charting": Mock()}, + ) + mock_chart.return_value = None + + result = await static_command_runner._execute_func( + "mock/route", (1, 2, 3, 4), execution_context, mock_func, {"chart": True} + ) + + assert result.results == [1, 2, 3, 4] + mock_logging_service.assert_called_once() + mock_parameters_builder_build.assert_called_once() + mock_command.assert_called_once() + mock_chart.assert_called_once() + + +def test_static_command_runner_chart(): + """Test _chart method when charting is in 
obbject.accessors.""" + + mock_obbject = OBBject( + results=[ + {"date": "1990", "value": 100}, + {"date": "1991", "value": 200}, + {"date": "1992", "value": 300}, + ], + provider="mock_provider", + accessors={"charting": Mock()}, + ) + mock_obbject.charting.show = Mock() + + StaticCommandRunner._chart(mock_obbject) # pylint: disable=protected-access + + mock_obbject.charting.show.assert_called_once() + + +@pytest.mark.asyncio +async def test_static_command_runner_command(): + """Test command.""" + + class MockOBBject: + """Mock OBBject""" + + def __init__(self, results, **kwargs): + self.results = results + self.extra = {} + self.provider = kwargs.get("provider_choices").provider + + class MockProviderChoices: + """Mock ProviderChoices""" + + def __init__(self, provider): + self.provider = provider + + def other_mock_func(**kwargs): + return MockOBBject([1, 2, 3, 4], **kwargs) + + mock_provider_choices = MockProviderChoices(provider="mock_provider") + + result = await StaticCommandRunner._command( + func=other_mock_func, + kwargs={"provider_choices": mock_provider_choices}, + ) + + assert result.results == [1, 2, 3, 4] + assert result.provider == "mock_provider" diff --git a/openbb_platform/core/tests/app/test_deprecation.py b/openbb_platform/core/tests/app/test_deprecation.py new file mode 100644 index 0000000000000000000000000000000000000000..bca6c5381ae34c5e426980bef8df4a9abced0042 --- /dev/null +++ b/openbb_platform/core/tests/app/test_deprecation.py @@ -0,0 +1,27 @@ +"""Test deprecated commands.""" + +import unittest + +from openbb_core.app.static.package_builder import PathHandler +from openbb_core.app.version import VERSION, get_major_minor + + +class DeprecatedCommandsTest(unittest.TestCase): + """Test deprecated commands.""" + + def test_deprecated_commands(self): + """Test deprecated commands.""" + current_major_minor = get_major_minor(VERSION) + route_map = PathHandler.build_route_map() + + for path, route in route_map.items(): + with 
self.subTest(i=path): + if getattr(route, "deprecated", False): + deprecation_message = getattr(route, "summary", "") + if hasattr(deprecation_message, "metadata"): + obb_deprecation_warning = deprecation_message.metadata + + assert ( + obb_deprecation_warning.expected_removal + != current_major_minor + ), f"The expected removal version of `{path}` matches the current version, please remove it." diff --git a/openbb_platform/core/tests/app/test_extension_loader.py b/openbb_platform/core/tests/app/test_extension_loader.py new file mode 100644 index 0000000000000000000000000000000000000000..1238f1fd4a83ef4a3c218755305aad84fd9de6d6 --- /dev/null +++ b/openbb_platform/core/tests/app/test_extension_loader.py @@ -0,0 +1,182 @@ +"""Tests for the ExtensionLoader class.""" + +from unittest.mock import patch + +import pytest +from openbb_core.app.extension_loader import EntryPoint, ExtensionLoader, OpenBBGroups + + +@pytest.fixture(autouse=True) +def setup_and_teardown(): + """ + Fixture to run before and after each test function. + + This is necessary to reset the singleton instance of ExtensionLoader. 
+ """ + # Code to run before each test function + yield # This is where the test function runs + # Code to run after each test function + # pylint: disable=protected-access + ExtensionLoader._instances = {} + + +def test_extension_loader(): + """Smoke test for extension loader.""" + extension_loader = ExtensionLoader() + assert extension_loader is not None + + +def test_extension_loader_singleton_prop(): + """Test the singleton property of extension loader.""" + extension_loader = ExtensionLoader() + extension_loader2 = ExtensionLoader() + assert extension_loader is extension_loader2 + + +def test_openbb_groups(): + """Test the OpenBBGroups enum.""" + assert len(OpenBBGroups) == 3 + assert OpenBBGroups.core.value == "openbb_core_extension" + assert OpenBBGroups.provider.value == "openbb_provider_extension" + assert OpenBBGroups.obbject.value == "openbb_obbject_extension" + + +def test_obbject_entry_points(): + """Test the obbject entry points property.""" + el = ExtensionLoader() + assert isinstance(el.obbject_entry_points, list) + + for ep in el.obbject_entry_points: + assert ep.group == OpenBBGroups.obbject.value + + +def test_core_entry_points(): + """Test the core entry points property.""" + el = ExtensionLoader() + assert isinstance(el.core_entry_points, list) + for ep in el.core_entry_points: + assert ep.group == OpenBBGroups.core.value + + +def test_provider_entry_points(): + """Test the provider entry points property.""" + el = ExtensionLoader() + assert isinstance(el.provider_entry_points, list) + for ep in el.provider_entry_points: + assert ep.group == OpenBBGroups.provider.value + + +def test_sorted_entry_points(): + """Test the _sorted_entry_points method.""" + # pylint: disable=protected-access + core_entry_points = ExtensionLoader._sorted_entry_points(OpenBBGroups.core.value) + for ep in core_entry_points: + assert ep.group == OpenBBGroups.core.value + + +def test_get_entry_point(): + """Test the _get_entry_point method.""" + el = ExtensionLoader() + 
# pylint: disable=protected-access + result = el._get_entry_point(el.provider_entry_points, "fmp") + if result: + assert result.group == OpenBBGroups.provider.value + assert result.name == "fmp" + + # pylint: disable=protected-access + result = el._get_entry_point(el.core_entry_points, "equity") + if result: + assert result.group == OpenBBGroups.core.value + assert result.name == "equity" + + +def test_get_entry_point_not_found(): + """Test the _get_entry_point method when the extension is not found.""" + el = ExtensionLoader() + # pylint: disable=protected-access + result = el._get_entry_point(el.core_entry_points, "random_extension") + assert result is None + + +@patch("openbb_core.app.extension_loader.ExtensionLoader._get_entry_point") +def test_get_obbject_entry_point(mock_get_entry_point): + """Test the get_obbject_entry_point method.""" + + mock_get_entry_point.return_value = EntryPoint( + name="mock_extension", group=OpenBBGroups.obbject.value, value="mock" + ) + + el = ExtensionLoader() + result = el.get_obbject_entry_point("mock_extension") + if result: + assert result.group == OpenBBGroups.obbject.value + assert result.name == "mock_extension" + + +@patch("openbb_core.app.extension_loader.ExtensionLoader._get_entry_point") +def test_get_entry_point_core(mock_get_entry_point): + """Test the get_core_entry_point method.""" + + mock_get_entry_point.return_value = EntryPoint( + name="mock_extension", group=OpenBBGroups.obbject.value, value="mock" + ) + + el = ExtensionLoader() + result = el.get_core_entry_point("mock_extension") + if result: + assert result.group == OpenBBGroups.obbject.value + assert result.name == "mock_extension" + + +@patch("openbb_core.app.extension_loader.ExtensionLoader._get_entry_point") +def test_get_entry_point_provider(mock_get_entry_point): + """Test the get_core_entry_point method.""" + + mock_get_entry_point.return_value = EntryPoint( + name="mock_extension", group=OpenBBGroups.obbject.value, value="mock" + ) + + el = 
ExtensionLoader() + result = el.get_provider_entry_point("mock_extension") + if result: + assert result.group == OpenBBGroups.obbject.value + assert result.name == "mock_extension" + + +def test_obbject_objects(): + """Test the obbject objects property.""" + # pylint: disable=import-outside-toplevel + from openbb_core.app.model.extension import Extension + + el = ExtensionLoader() + assert isinstance(el.obbject_objects, dict) + + for key, value in el.obbject_objects.items(): + assert isinstance(key, str) + assert isinstance(value, Extension) + + +def test_core_objects(): + """Test the core objects property.""" + # pylint: disable=import-outside-toplevel + from openbb_core.app.router import Router + + el = ExtensionLoader() + assert isinstance(el.core_objects, dict) + + for key, value in el.core_objects.items(): + assert isinstance(key, str) + assert isinstance(value, Router) + + +def test_provider_objects(): + """Test the provider objects property.""" + # pylint: disable=import-outside-toplevel + from openbb_core.provider.abstract.provider import Provider + + el = ExtensionLoader() + assert isinstance(el.provider_objects, dict) + + for key, value in el.provider_objects.items(): + assert isinstance(key, str) + assert isinstance(value, Provider) diff --git a/openbb_platform/core/tests/app/test_platform_router.py b/openbb_platform/core/tests/app/test_platform_router.py new file mode 100644 index 0000000000000000000000000000000000000000..0a9adbca9165ee5c0b2a899990f518bdbb451004 --- /dev/null +++ b/openbb_platform/core/tests/app/test_platform_router.py @@ -0,0 +1,215 @@ +"""Test router.py file.""" + +# pylint: disable=redefined-outer-name +# pylint: disable=unused-argument + + +from typing import List, Optional + +import pytest +from openbb_core.app.model.command_context import CommandContext +from openbb_core.app.model.obbject import OBBject +from openbb_core.app.provider_interface import ( + ExtraParams, + ProviderChoices, + StandardParams, +) +from 
openbb_core.app.router import ( + CommandMap, + Router, + RouterLoader, + SignatureInspector, +) +from pydantic import BaseModel, ConfigDict + + +class MockBaseModel(BaseModel): + """Mock BaseModel class.""" + + model_config = ConfigDict(extra="allow", populate_by_name=True) + + +@pytest.fixture(scope="module") +def router(): + """Set up router.""" + return Router() + + +def test_router_init(router): + """Test init.""" + assert router + + +def test_command(router): + """Test command.""" + + @router.command + async def valid_function() -> OBBject[Optional[List[int]]]: + return OBBject(results=[1, 2, 3]) + + assert valid_function + + +def test_include_router(router): + """Test include_router.""" + some_router = Router() + assert router.include_router(some_router) is None + + +@pytest.fixture(scope="module") +def router_loader(): + """Set up router_loader.""" + return RouterLoader() + + +def test_router_loader_init(router_loader): + """Test init.""" + assert router_loader + + +def test_from_extensions(router_loader): + """Test from_extensions.""" + assert router_loader.from_extensions() + + +@pytest.fixture(scope="module") +def signature_inspector(): + """Set up signature_inspector.""" + return SignatureInspector() + + +def test_signature_inspector_init(signature_inspector): + """Test init.""" + assert signature_inspector + + +def test_complete_signature(signature_inspector): + """Test complete_signature.""" + + async def sample_function( # type: ignore[empty-body] + cc: CommandContext, + provider_choices: ProviderChoices, + standard_params: StandardParams, + extra_params: ExtraParams, + ) -> OBBject: + pass + + model = "EquityHistorical" + + assert signature_inspector.complete(sample_function, model) + + +def test_complete_signature_error(signature_inspector): + """Test complete_signature.""" + + async def valid_function() -> OBBject[Optional[List[int]]]: + return OBBject(results=[1, 2, 3]) + + assert signature_inspector.complete(valid_function, "invalid_model") is 
None + + +def test_validate_signature(signature_inspector): + """Test validate_signature.""" + + async def sample_function( # type: ignore + cc: CommandContext, + provider_choices: ProviderChoices, + standard_params: StandardParams, + extra_params: ExtraParams, + ) -> OBBject: + pass + + expected_signature = { + "cc": CommandContext, + "provider_choices": ProviderChoices, + "standard_params": StandardParams, + "extra_params": ExtraParams, + } + + assert ( + signature_inspector.validate_signature(sample_function, expected_signature) + is None + ) + + +def test_inject_dependency(signature_inspector): + """Test inject_dependency.""" + + async def sample_function( # type: ignore + cc: CommandContext, + provider_choices: ProviderChoices, + standard_params: StandardParams, + extra_params: ExtraParams, + ) -> OBBject: + pass + + assert signature_inspector.inject_dependency(sample_function, "cc", CommandContext) + + +def test_get_description(signature_inspector): + """Test get_description.""" + + async def some_function(): + """Mock function.""" + + assert signature_inspector.get_description(some_function) == some_function.__doc__ + + +def test_get_description_no_doc(signature_inspector): + """Test get_description.""" + + async def some_function(): + pass + + assert not signature_inspector.get_description(some_function) + + +@pytest.fixture(scope="module") +def command_map(): + """Set up command_map.""" + return CommandMap() + + +def test_command_map_init(command_map): + """Test init.""" + assert command_map + + +def test_map(command_map): + """Test map.""" + assert isinstance(command_map.map, dict) + + +def test_provider_coverage(command_map): + """Test provider_coverage.""" + assert isinstance(command_map.provider_coverage, dict) + + +def test_command_coverage(command_map): + """Test command_coverage.""" + assert isinstance(command_map.command_coverage, dict) + + +def test_get_command_map(command_map, router): + """Test get_command_map.""" + command_map = 
command_map.get_command_map(router) + assert isinstance(command_map, dict) + assert len(command_map) > 0 + + +def test_get_provider_coverage(command_map, router): + """Test get_provider_coverage.""" + provider_coverage = command_map.get_provider_coverage(router) + assert isinstance(provider_coverage, dict) + + +def test_get_command_coverage(command_map, router): + """Test get_command_coverage.""" + command_coverage = command_map.get_command_coverage(router) + assert isinstance(command_coverage, dict) + + +def test_get_command(command_map): + """Test get_command.""" + command = command_map.get_command("stocks/load") + assert command is None diff --git a/openbb_platform/core/tests/app/test_provider_interface.py b/openbb_platform/core/tests/app/test_provider_interface.py new file mode 100644 index 0000000000000000000000000000000000000000..4062c52172c82c40f2957cbfc4e1a8dcaeadcf9d --- /dev/null +++ b/openbb_platform/core/tests/app/test_provider_interface.py @@ -0,0 +1,80 @@ +"""Test provider interface.""" + +# pylint: disable=redefined-outer-name + +import pytest +from openbb_core.app.provider_interface import ( + ProviderChoices, + ProviderInterface, +) + + +@pytest.fixture(scope="module") +def provider_interface(): + """Set up provider_interface.""" + return ProviderInterface() + + +def test_init(provider_interface): + """Test init.""" + assert provider_interface + + +def test_map(provider_interface): + """Test map.""" + provider_interface_map = provider_interface.map + assert isinstance(provider_interface_map, dict) + assert len(provider_interface_map) > 0 + assert "EquityHistorical" in provider_interface_map + + +def test_credentials(provider_interface): + """Test required credentials.""" + credentials = provider_interface.credentials + assert isinstance(credentials, dict) + assert len(credentials) > 0 + + +def test_model_providers(provider_interface): + """Test model providers.""" + model_providers = provider_interface.model_providers + assert 
isinstance(model_providers, dict) + assert len(model_providers) > 0 + + +def test_params(provider_interface): + """Test params.""" + params = provider_interface.params + assert isinstance(params, dict) + assert len(params) > 0 + assert "EquityHistorical" in params + + +def test_data(provider_interface): + """Test data.""" + data = provider_interface.data + assert isinstance(data, dict) + assert len(data) > 0 + assert "EquityHistorical" in data + + +def test_available_providers(provider_interface): + """Test providers literal.""" + available_providers = provider_interface.available_providers + assert isinstance(available_providers, list) + assert len(available_providers) > 0 + assert "openbb" not in available_providers + + +def test_provider_choices(provider_interface): + """Test provider choices.""" + provider_choices = provider_interface.provider_choices + assert isinstance(provider_choices, type(ProviderChoices)) + + +def test_models(provider_interface): + """Test models.""" + models = provider_interface.models + assert isinstance(models, list) + assert len(models) > 0 + assert "EquityHistorical" in models diff --git a/openbb_platform/core/tests/app/test_query.py b/openbb_platform/core/tests/app/test_query.py new file mode 100644 index 0000000000000000000000000000000000000000..082ab43db4e6df01a33790dcd0051e78dd505c9f --- /dev/null +++ b/openbb_platform/core/tests/app/test_query.py @@ -0,0 +1,131 @@ +"""Test the Query class.""" + +# pylint: disable=redefined-outer-name + +from dataclasses import dataclass +from unittest.mock import MagicMock, patch + +import pytest +from openbb_core.app.model.command_context import CommandContext +from openbb_core.app.provider_interface import ( + ExtraParams, + ProviderChoices, + StandardParams, +) +from openbb_core.app.query import Query +from pydantic import BaseModel, ConfigDict + + +class MockBaseModel(BaseModel): + """Mock QueryParams class.""" + + model_config = ConfigDict(extra="allow", populate_by_name=True) + + +def 
create_mock_query(): + """Mock query.""" + + class EquityHistorical: + """Mock EquityHistorical class.""" + + start_date = "2020-01-01" + end_date = "2020-01-05" + symbol = "AAPL" + + return EquityHistorical() + + +def create_mock_extra_params(): + """Mock ExtraParams dataclass.""" + + @dataclass + class EquityHistorical: + """Mock ExtraParams dataclass.""" + + sort: str = "desc" + + return EquityHistorical() + + +@pytest.fixture(scope="module") +def query(): + """Set up query.""" + return Query( + cc=CommandContext(), + provider_choices=ProviderChoices(provider="fmp"), + standard_params=StandardParams(), + extra_params=ExtraParams(), + ) + + +def test_init(query): + """Test init.""" + assert query + + +@pytest.fixture +def mock_registry(): + """Mock registry.""" + with patch( + "openbb_core.app.provider_interface.ProviderInterface" + ) as mock_get_provider_interface: + mock_registry = MagicMock() + mock_get_provider_interface.return_value.build_registry.return_value = ( + mock_registry + ) + yield mock_registry + + +@pytest.fixture +def query_instance(): + """Set up query.""" + standard_params = create_mock_query() + extra_params = create_mock_extra_params() + + cc = CommandContext() + setattr( + cc.user_settings.credentials, "fmp_api_key", "1234" # pylint: disable=no-member + ) + + return Query( + cc=cc, + provider_choices=ProviderChoices(provider="fmp"), + standard_params=standard_params, + extra_params=extra_params, + ) + + +def test_filter_extra_params(query): + """Test filter_extra_params.""" + extra_params = create_mock_extra_params() + extra_params = query.filter_extra_params(extra_params, "fmp") + + assert isinstance(extra_params, dict) + assert len(extra_params) == 0 + + +def test_filter_extra_params_wrong_param(query): + """Test filter_extra_params.""" + + @dataclass + class EquityHistorical: + """Mock ExtraParams dataclass.""" + + sort: str = "desc" + limit: int = 4 + + extra_params = EquityHistorical() + + extra = 
query.filter_extra_params(extra_params, "fmp") + assert isinstance(extra, dict) + assert len(extra) == 0 + + +@pytest.mark.asyncio +async def test_execute_method_fake_credentials(query_instance: Query, mock_registry): + """Test execute method without setting credentials.""" + mock_fetch_result = MockBaseModel() + mock_registry.fetch.return_value = mock_fetch_result + + with pytest.raises(Exception): + await query_instance.execute() diff --git a/openbb_platform/core/tests/app/test_utils.py b/openbb_platform/core/tests/app/test_utils.py new file mode 100644 index 0000000000000000000000000000000000000000..ab07be91597d08206944083f13edae3087ac43d4 --- /dev/null +++ b/openbb_platform/core/tests/app/test_utils.py @@ -0,0 +1,160 @@ +"""OpenBB Platform Core app utils tests.""" + +import numpy as np +import pandas as pd +import pytest +from openbb_core.app.model.abstract.error import OpenBBError +from openbb_core.app.utils import ( + basemodel_to_df, + check_single_item, + df_to_basemodel, + dict_to_basemodel, + get_target_column, + get_target_columns, + list_to_basemodel, + ndarray_to_basemodel, +) +from openbb_core.provider.abstract.data import Data + +# pylint: disable=W0621 + +df = pd.DataFrame( + { + "x": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + "y": [5, 4, 3, 9, 44, 5, 66, 11, 777, 1], + "z": [1, 2, 3, 4, 5, 6, 7, 8, 9, 10], + }, +) + +df_multiindex = df.set_index(["x", "y"]) + +simple_base_model = [ + Data(x=i, y=j, z=k) for i in range(2) for j in range(6, 8) for k in range(10, 12) # type: ignore[call-arg] +] + +multi_index_base_model = [ + Data(x=i, y=j, z=k, is_multiindex=True, multiindex_names="['x','y']") # type: ignore[call-arg] + for i in range(2) + for j in range(6, 8) + for k in range(10, 12) +] + + +def test_df_to_basemodel(): + """Test the df_to_basemodel helper.""" + base_model = df_to_basemodel(df) + assert isinstance(base_model, list) + assert base_model[0].x == 1 # type: ignore[attr-defined] + + +def test_df_to_basemodel_multiindex(): + """Test the 
df_to_basemodel helper with a multi-index DataFrame.""" + base_model = df_to_basemodel(df_multiindex) + assert isinstance(base_model, list) + assert hasattr(base_model[0], "is_multiindex") + + +def test_basemodel_to_df(): + """Test the basemodel_to_df helper.""" + df = basemodel_to_df(simple_base_model) + assert isinstance(df, pd.DataFrame) + assert df.shape == (8, 3) + + +def test_basemodel_to_multiindex_df(): + """Test the basemodel_to_df helper with a multi-index DataFrame.""" + df = basemodel_to_df(multi_index_base_model) + assert isinstance(df, pd.DataFrame) + assert isinstance(df.index, pd.MultiIndex) + + +def test_get_target_column(): + """Test the get_target_column helper.""" + target = get_target_column(df, "x") + assert isinstance(target, pd.Series) + assert target[0] == 1 + + +def test_get_target_columns(): + """Test the get_target_columns helper.""" + targets = get_target_columns(df, ["x", "y"]) + assert isinstance(targets, pd.DataFrame) + assert targets.shape == (10, 2) + + +@pytest.mark.parametrize( + "data_list, expected", + [ + # List of dictionaries + ([{"a": 1}, {"b": 2}], [Data(a=1), Data(b=2)]), + # List with a single DataFrame + ([pd.DataFrame({"c": [3, 4]})], [Data(c=3), Data(c=4)]), + # List with mixed types + ([{"d": 5}, pd.Series([6, 7], name="e")], [Data(d=5), Data(e=6), Data(e=7)]), + ], +) +def test_list_to_basemodel(data_list, expected): + """Test the list_to_basemodel helper.""" + result = list_to_basemodel(data_list) + for r, e in zip(result, expected): + assert r.model_dump() == e.model_dump() + + +@pytest.mark.parametrize( + "data_dict, expected", + [ + # Simple dictionary + ({"a": 10}, Data(a=10)), # type: ignore[call-arg] + # Nested dictionary (assuming Data can handle nested dicts) + ({"b": {"c": 20}}, Data(b={"c": 20})), # type: ignore[call-arg] + # Dictionary with list (assuming Data can handle lists) + ({"d": [30, 40]}, Data(d=[30, 40])), # type: ignore[call-arg] + ], +) +def test_dict_to_basemodel(data_dict, expected): + 
"""Test the dict_to_basemodel helper.""" + result = dict_to_basemodel(data_dict) + assert result.model_dump() == expected.model_dump() + + +@pytest.mark.parametrize( + "array, expected", + [ + # 2D array with single row + (np.array([[1, 2]]), [Data(column_0=1, column_1=2)]), + # 2D array with multiple rows + ( + np.array([[3, 4], [5, 6]]), + [Data(column_0=3, column_1=4), Data(column_0=5, column_1=6)], + ), + # 2D array with non-numeric data + ( + np.array([["a", "b"], ["c", "d"]]), + [Data(column_0="a", column_1="b"), Data(column_0="c", column_1="d")], + ), + ], +) +def test_ndarray_to_basemodel(array, expected): + """Test the ndarray_to_basemodel helper.""" + result = ndarray_to_basemodel(array) + for r, e in zip(result, expected): + assert r.model_dump() == e.model_dump() + + +@pytest.mark.parametrize( + "item, expected", + [ + ("SYMBOL", "SYMBOL"), + (None, None), + ("", ""), + ("SYMBOL1,SYMBOL2", OpenBBError), + ("SYMBOL1;SYMBOL2", OpenBBError), + ], +) +def test_check_single_item(item, expected): + """Test the check_single_item helper.""" + if expected is OpenBBError: + with pytest.raises(OpenBBError): + check_single_item(item) + else: + assert check_single_item(item) == expected diff --git a/openbb_platform/core/tests/provider/abstract/test_data.py b/openbb_platform/core/tests/provider/abstract/test_data.py new file mode 100644 index 0000000000000000000000000000000000000000..0845b6cd4c790ea01de65dbe074fcf10e575e8c5 --- /dev/null +++ b/openbb_platform/core/tests/provider/abstract/test_data.py @@ -0,0 +1,43 @@ +"""Test the Data.""" + +# pylint: disable=C2801 + +import pytest +from openbb_core.provider.abstract.data import Data, check_int + + +def test_check_int_valid(): + """Test if the check_int function returns the value when it is an int.""" + assert check_int(10) == 10 + + +def test_check_int_invalid(): + """Test if the check_int function raises an error when the value is not an int.""" + with pytest.raises(TypeError): + check_int("not_an_integer") # type: 
ignore[arg-type] + + +def test_data_model(): + """Test the Data model.""" + some_data = Data(test="test") # type: ignore[call-arg] + + assert some_data.test == "test" # type: ignore[attr-defined] + assert not some_data.__alias_dict__ + assert some_data.__repr__() == "Data(test=test)" + assert some_data.model_dump() == {"test": "test"} + + +def test_data_model_alias(): + """Test the Data model with an alias.""" + + class SomeData(Data): + """Some data.""" + + __alias_dict__ = {"test_alias": "test"} + + some_data = SomeData(test="Hello") # type: ignore[call-arg] + + assert some_data.__alias_dict__ == {"test_alias": "test"} + assert some_data.__repr__() == "SomeData(test_alias=Hello)" + assert some_data.model_dump() == {"test_alias": "Hello"} + assert some_data.test_alias == "Hello" # type: ignore[attr-defined] diff --git a/openbb_platform/core/tests/provider/abstract/test_fetcher.py b/openbb_platform/core/tests/provider/abstract/test_fetcher.py new file mode 100644 index 0000000000000000000000000000000000000000..ff9790e8267a401c1781b4b0f5da22a51faa5a22 --- /dev/null +++ b/openbb_platform/core/tests/provider/abstract/test_fetcher.py @@ -0,0 +1,70 @@ +"""Test the Fetcher.""" + +from typing import Any, Dict, List, Optional + +import pytest +from openbb_core.provider.abstract.fetcher import Data, Fetcher, QueryParams + +# Step 1: Create a dummy subclass of Fetcher + + +class MockData(Data): + """Mock data class.""" + + +class MockQueryParams(QueryParams): + """Mock query params class.""" + + +class MockFetcher(Fetcher[MockQueryParams, List[MockData]]): + """Mock fetcher class.""" + + @staticmethod + def transform_query(params: Dict[str, Any]) -> MockQueryParams: + """Transform the params to the provider-specific query.""" + return MockQueryParams() + + @staticmethod + def extract_data( + query: MockQueryParams, credentials: Optional[Dict[str, str]] + ) -> Any: + """Extract the data from the provider.""" + return [{"mock_key": "mock_value"}] # Mocking a data response + + 
@staticmethod + def transform_data(query: MockQueryParams, data: Any, **kwargs) -> List[MockData]: + """Transform the provider-specific data.""" + return [MockData(**item) for item in data] + + +@pytest.mark.asyncio +async def test_fetcher_methods(): + """Test the Fetcher abstract methods using a mock Fetcher subclass.""" + params = {"param1": "value1"} + mock_fetcher = MockFetcher() + + fetched_data = await mock_fetcher.fetch_data(params=params) + assert isinstance(fetched_data, list) + assert isinstance(fetched_data[0], MockData) + assert fetched_data[0].model_dump() == {"mock_key": "mock_value"} + + +def test_fetcher_query_params_type(): + """Test the query_params_type classproperty.""" + assert MockFetcher.query_params_type == MockQueryParams + + +def test_fetcher_return_type(): + """Test the return_type classproperty.""" + assert MockFetcher.return_type == List[MockData] + + +def test_fetcher_data_type(): + """Test the data_type classproperty.""" + assert MockFetcher.data_type == MockData + + +def test_fetcher_test(): + """Test the test method.""" + tested = MockFetcher.test(params={}) + assert tested is None diff --git a/openbb_platform/core/tests/provider/abstract/test_provider.py b/openbb_platform/core/tests/provider/abstract/test_provider.py new file mode 100644 index 0000000000000000000000000000000000000000..35c51cfa50ae382af9ecfffba8b12b7e6ef93d98 --- /dev/null +++ b/openbb_platform/core/tests/provider/abstract/test_provider.py @@ -0,0 +1,45 @@ +"""Test the Provider.""" + +from openbb_core.provider.abstract.fetcher import Fetcher +from openbb_core.provider.abstract.provider import Provider + + +def test_provider_initialization(): + """Test the basic initialization of the Provider class.""" + provider = Provider(name="TestProvider", description="A simple test provider.") + + assert provider.name == "TestProvider" + assert provider.description == "A simple test provider." 
+ assert provider.website is None + assert provider.credentials == [] + assert provider.fetcher_dict == {} + + +def test_provider_with_optional_parameters(): + """Test the initialization of the Provider class with optional parameters.""" + provider = Provider( + name="TestProvider", + description="A simple test provider.", + website="https://testprovider.example.com", + credentials=["api_key"], + fetcher_dict={"fetcher1": Fetcher}, + ) + + assert provider.name == "TestProvider" + assert provider.description == "A simple test provider." + assert provider.website == "https://testprovider.example.com" + assert provider.credentials == ["testprovider_api_key"] + assert provider.fetcher_dict == {"fetcher1": Fetcher} + + +def test_provider_credentials_formatting(): + """Test the formatting of required credentials.""" + credentials = ["key1", "key2"] + provider = Provider( + name="TestProvider", + description="A simple test provider.", + credentials=credentials, + ) + + expected_credentials = ["testprovider_key1", "testprovider_key2"] + assert provider.credentials == expected_credentials diff --git a/openbb_platform/core/tests/provider/abstract/test_query_params.py b/openbb_platform/core/tests/provider/abstract/test_query_params.py new file mode 100644 index 0000000000000000000000000000000000000000..25ee7aa4776351992a5aba40cfa59939316b01ef --- /dev/null +++ b/openbb_platform/core/tests/provider/abstract/test_query_params.py @@ -0,0 +1,30 @@ +"""Test QueryParams.""" + +from openbb_core.provider.abstract.query_params import QueryParams + + +def test_query_params_repr(): + """Test the __repr__ method of QueryParams.""" + params = QueryParams(param1="value1", param2="value2") # type: ignore[call-arg] + assert "param1='value1'" in str(params) + assert "param2='value2'" in str(params) + + +def test_query_params_no_alias(): + """Test model_dump without aliases.""" + params = QueryParams(param1="value1", param2="value2") # type: ignore[call-arg] + dumped_params = 
params.model_dump() + + assert dumped_params == {"param1": "value1", "param2": "value2"} + + +def test_query_params_with_alias(): + """Test model_dump with aliases.""" + + class AliasedQueryParams(QueryParams): + __alias_dict__ = {"param1": "alias1"} + + params = AliasedQueryParams(param1="value1", param2="value2") # type: ignore[call-arg] + dumped_params = params.model_dump() + + assert dumped_params == {"alias1": "value1", "param2": "value2"} diff --git a/openbb_platform/core/tests/provider/standard_models/test_standard_models.py b/openbb_platform/core/tests/provider/standard_models/test_standard_models.py new file mode 100644 index 0000000000000000000000000000000000000000..c82776bfc380a9baa8c15e7435aec129408b8feb --- /dev/null +++ b/openbb_platform/core/tests/provider/standard_models/test_standard_models.py @@ -0,0 +1,69 @@ +"""Test the standard models.""" + +# pylint: disable=W0401 + +import inspect +from importlib import import_module +from pathlib import Path + +import pytest +from openbb_core.provider.abstract.fetcher import Data, QueryParams +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) +from pydantic.fields import FieldInfo + +models_path = ( + Path(__file__).parent.parent.parent.parent + / "openbb_core" + / "provider" + / "standard_models" +).resolve() +model_files = models_path.glob("*.py") + +standard_models = [] +for model_file in model_files: + if model_file.stem == "__init__": + continue + + model_module = import_module( + f"openbb_core.provider.standard_models.{model_file.stem}" + ) + for _, obj in inspect.getmembers(model_module): + if inspect.isclass(obj) and ( + issubclass(obj, Data) or issubclass(obj, QueryParams) + ): + if "abstract" in obj.__module__: + continue + standard_models.append(obj) + + +@pytest.mark.parametrize("standard_model", standard_models) +def test_standard_models(standard_model): + """Test the standard models.""" + assert issubclass(standard_model, Data) or issubclass( + 
standard_model, QueryParams + ), f"{standard_model.__name__} should be a subclass of Data or QueryParams" + + fields = standard_model.model_fields + + for name, field in fields.items(): + assert isinstance( + field, FieldInfo + ), f"Field {name} should be a ModelField instance" + if "QueryParams" in standard_model.__name__: + if name in QUERY_DESCRIPTIONS: + assert QUERY_DESCRIPTIONS[name] in getattr(field, "description"), ( + f"Description for {name} is incorrect for the {standard_model.__name__}.\n" + f"Please modify the description or change the field name to a non-reserved name." + f"To get a full list of reserved descriptions, navigate to openbb_core.provider.utils.descriptions.py" + f"You can also add extra information to the existing reserved field description in your model." + ) + elif name in DATA_DESCRIPTIONS: + assert DATA_DESCRIPTIONS[name] in getattr(field, "description"), ( + f"Description for {name} is incorrect for the {standard_model.__name__}.\n" + f"Please modify the description or change the field name to a non-reserved name." + f"To get a full list of reserved descriptions, navigate to openbb_core.provider.utils.descriptions.py" + f"You can also add extra information to the existing reserved field description in your model." 
+ ) diff --git a/openbb_platform/core/tests/provider/test_query_executor.py b/openbb_platform/core/tests/provider/test_query_executor.py new file mode 100644 index 0000000000000000000000000000000000000000..9c75ff7cc06898169dd8994d7027be3b1dfc15de --- /dev/null +++ b/openbb_platform/core/tests/provider/test_query_executor.py @@ -0,0 +1,122 @@ +"""Test the Query Executor.""" + +# pylint: disable=W0621 + +from unittest.mock import MagicMock, patch + +import pytest +from openbb_core.app.model.abstract.error import OpenBBError +from openbb_core.provider.abstract.fetcher import Fetcher +from openbb_core.provider.abstract.provider import Provider +from openbb_core.provider.query_executor import QueryExecutor +from pydantic import SecretStr + + +@pytest.fixture +def mock_query_executor(): + """Mock the query executor.""" + registry = MagicMock() + registry.providers = { + "test_provider": Provider( + name="Test", + description="Test provider", + fetcher_dict={"test_fetcher": Fetcher}, + ), + } + executor = QueryExecutor(registry=registry) + return executor + + +def test_get_provider_success(mock_query_executor): + """Test if the method can retrieve a provider successfully.""" + provider = mock_query_executor.get_provider("test_provider") + assert provider.name == "Test" + + +def test_get_provider_failure(mock_query_executor): + """Test if the method fails properly when the provider does not exist.""" + with pytest.raises(OpenBBError, match="Provider 'nonexistent' not found"): + mock_query_executor.get_provider("nonexistent") + + +def test_get_fetcher_success(mock_query_executor): + """Test if the method can retrieve a fetcher successfully.""" + provider = mock_query_executor.get_provider("test_provider") + fetcher = mock_query_executor.get_fetcher(provider, "test_fetcher") + assert issubclass(fetcher, Fetcher) + + +def test_get_fetcher_failure(mock_query_executor): + """Test if the method fails properly when the fetcher does not exist.""" + provider = 
mock_query_executor.get_provider("test_provider") + with pytest.raises(OpenBBError, match="Fetcher not found"): + mock_query_executor.get_fetcher(provider, "nonexistent_fetcher") + + +def test_filter_credentials_success(mock_query_executor): + """Test if credentials are properly filtered.""" + provider = mock_query_executor.get_provider("test_provider") + provider.credentials = ["test_provider_api_key"] + credentials = { + "test_provider_api_key": SecretStr("12345"), + "other_api_key": SecretStr("12345"), + } + + filtered_credentials = mock_query_executor.filter_credentials( + credentials, provider, True + ) + + assert filtered_credentials == {"test_provider_api_key": "12345"} + + +def test_filter_credentials_missing_require(mock_query_executor): + """Test if the proper error is raised when a credential is missing.""" + provider = mock_query_executor.get_provider("test_provider") + provider.credentials = ["test_provider_api_key"] + credentials = {"other_api_key": SecretStr("12345")} + + with pytest.raises(OpenBBError, match="Missing credential"): + mock_query_executor.filter_credentials(credentials, provider, True) + + +def test_filter_credentials_empty_require(mock_query_executor): + """Test if the proper error is raised when a credential is missing.""" + provider = mock_query_executor.get_provider("test_provider") + provider.credentials = ["test_provider_api_key"] + credentials = { + "test_provider_api_key": SecretStr(""), + "other_api_key": SecretStr("12345"), + } + + with pytest.raises(OpenBBError, match="Missing credential"): + mock_query_executor.filter_credentials(credentials, provider, True) + + +def test_filter_credentials_missing_dont_require(mock_query_executor): + """Test if the proper error is raised when a credential is missing.""" + provider = mock_query_executor.get_provider("test_provider") + provider.credentials = ["test_provider_api_key"] + credentials = {"other_api_key": SecretStr("12345")} + + filtered_credentials = 
mock_query_executor.filter_credentials( + credentials, provider, False + ) + + assert filtered_credentials == {} + + +@pytest.mark.asyncio +async def test_execute_success(mock_query_executor: QueryExecutor): + """Test if the method can execute a query successfully.""" + mock_result = {"data": "test_data"} + + params = {"param1": "value1"} + credentials = {"api_key": SecretStr("12345")} + + with patch.object(Fetcher, "fetch_data", return_value=mock_result) as mock_fetch: + result = await mock_query_executor.execute( + "test_provider", "test_fetcher", params, credentials + ) + + assert result == mock_result + mock_fetch.assert_called_once_with(params, {}, **{}) diff --git a/openbb_platform/core/tests/provider/test_registry.py b/openbb_platform/core/tests/provider/test_registry.py new file mode 100644 index 0000000000000000000000000000000000000000..a0e2aceacccee0ca231e8307ae18e07239413a0e --- /dev/null +++ b/openbb_platform/core/tests/provider/test_registry.py @@ -0,0 +1,30 @@ +"""Test the Registry.""" + +from openbb_core.provider.abstract.provider import Provider +from openbb_core.provider.registry import Registry, RegistryLoader + + +def test_registry(): + """Test the registry.""" + registry = Registry() + assert registry.providers == {} + + mock_provider = Provider(name="TestProvider", description="Just a test provider.") + registry.include_provider(mock_provider) + + assert "testprovider" in registry.providers + assert registry.providers["testprovider"] == mock_provider + + +def test_registry_loader_integration(): + """Execute the loading process.""" + core_providers = ["fmp", "polygon", "fred", "benzinga", "intrinio"] + registry = RegistryLoader.from_extensions() + + assert len(registry.providers) > 0 + + for provider in core_providers: + assert provider in registry.providers + + for provider in registry.providers.values(): + assert isinstance(provider, Provider) diff --git a/openbb_platform/core/tests/provider/test_registry_map.py 
b/openbb_platform/core/tests/provider/test_registry_map.py new file mode 100644 index 0000000000000000000000000000000000000000..052a4c735d482ce271adf3000e3c88d1fdfeaf6f --- /dev/null +++ b/openbb_platform/core/tests/provider/test_registry_map.py @@ -0,0 +1,41 @@ +"""Test the registry map.""" + +# pylint: disable=W0621 + +import pytest +from openbb_core.provider.registry_map import RegistryMap + + +@pytest.fixture +def load_registry_map(): + """Mock the registry map.""" + return RegistryMap() + + +def test_get_credentials(load_registry_map): + """Test if the _get_credentials method behaves as expected.""" + required_creds = load_registry_map.credentials + + assert "fmp" in required_creds + assert required_creds["fmp"] == ["fmp_api_key"] + + +def test_get_available_providers(load_registry_map): + """Test if the _get_available_providers method behaves as expected.""" + available_providers = load_registry_map.available_providers + + assert "fmp" in available_providers + assert len(available_providers) > 0 + + +def test_map_and_models(load_registry_map): + """Test if the _get_map method behaves as expected.""" + standard_extra, original_models = ( + load_registry_map.standard_extra, + load_registry_map.original_models, + ) + models = load_registry_map.models + + assert "EquityHistorical" in standard_extra + assert "EquityHistorical" in original_models + assert "EquityHistorical" in models diff --git a/openbb_platform/core/tests/provider/utils/test_client.py b/openbb_platform/core/tests/provider/utils/test_client.py new file mode 100644 index 0000000000000000000000000000000000000000..5b018994d9b4eb5c5ac98871d160cb084a622e20 --- /dev/null +++ b/openbb_platform/core/tests/provider/utils/test_client.py @@ -0,0 +1,206 @@ +"""Test the client helper.""" + +import gzip +import json +import zlib + +import aiohttp +import pytest +from multidict import CIMultiDict, CIMultiDictProxy +from openbb_core.provider.utils import client +from yarl import URL + + +def test_obfuscate(): + 
"""Test the obfuscate helper.""" + params = CIMultiDict( + { + "api_key": "1234", + "token": "1234", + "auth": "1234", + "auth_token": "1234", + "c": "1234", + "api_key2": "1234", + } + ) + + assert client.obfuscate(params) == { + "api_key": "********", + "token": "********", + "auth": "********", + "auth_token": "********", + "c": "********", + "api_key2": "********", + } + + +def test_get_user_agent(): + """Test the get_user_agent helper.""" + user_agent = client.get_user_agent() + assert "Mozilla/5.0" in user_agent + + +class MockResponse: + """Mock response class.""" + + def __init__(self, method, url, **kwargs): + """Initialize.""" + self.url = URL(url) + self.method = method + self.body = kwargs.get("body", {"test": "test"}) + self.status = kwargs.get("status", 200) + self.headers = kwargs.get("headers", {}) + + request_info = aiohttp.RequestInfo( + url=self.url, + method=method, + headers=CIMultiDictProxy(CIMultiDict(self.headers)), + real_url=self.url, + ) + self.request_info = client.ClientResponse.obfuscate_request_info(request_info) + + async def json(self, **_): + """Return the json response.""" + return self.body + + async def read(self): + """Return the response body.""" + return self.body + + def raise_for_status(self): + """Raise an exception.""" + raise Exception("Test") + + +class MockClientSession(client.ClientSession): + """Mock ClientSession.""" + + # pylint: disable=unused-argument,signature-differs + def __del__(self): # type: ignore + """Delete the session.""" + + async def request( # type: ignore + self, *args, raise_for_status: bool = False, **kwargs + ) -> client.ClientResponse: + """Mock the request method.""" + response = MockResponse(*args, **kwargs) + + if raise_for_status: + response.raise_for_status() + + encoding = response.headers.get("Content-Encoding", "") + if encoding in ("gzip", "deflate") and not self.auto_decompress: + response_body = await response.read() + wbits = 16 + zlib.MAX_WBITS if encoding == "gzip" else 
-zlib.MAX_WBITS + response.body = json.loads( + zlib.decompress(response_body, wbits).decode("utf-8") + ) + + return response # type: ignore + + +@pytest.mark.parametrize( + "url_params, obfuscated_params", + [ + ( + "?api_key=1234&token=1234", + "?api_key=********&token=********", + ), + ( + "?symbol=TSLA&api_key=1234", + "?symbol=TSLA&api_key=********", + ), + ( + "?auth_token=1234&c=1234", + "?auth_token=********&c=********", + ), + ( + "?auth=1234&c=1234", + "?auth=********&c=********", + ), + ( + "?api_key2=1234&cc=1234&some_token=1234", + "?api_key2=********&cc=1234&some_token=********", + ), + ], +) +@pytest.mark.asyncio +async def test_client_response_obfuscate_request_info(url_params, obfuscated_params): + """Test the ClientSession post helper.""" + headers = {"Authorization": "Bearer 1234"} + + response = await MockClientSession().get( + f"http://mock.url{url_params}", headers=headers + ) + + assert isinstance(response, MockResponse) + assert response.request_info.url == URL(f"http://mock.url{obfuscated_params}") + + assert response.request_info.headers == CIMultiDictProxy( + CIMultiDict({"Authorization": "********"}) + ) + + +@pytest.mark.asyncio +async def test_client_get(): + """Test the ClientSession get helper.""" + response = await MockClientSession().get("http://mock.url") + assert isinstance(response, MockResponse) + assert response.method == "GET" + assert response.status == 200 + assert response.body == {"test": "test"} + assert response.request_info.url == URL("http://mock.url") + + +@pytest.mark.asyncio +async def test_client_post(): + """Test the ClientSession post helper.""" + + response = await MockClientSession().post("http://mock.url") + assert isinstance(response, MockResponse) + assert response.method == "POST" + assert response.status == 200 + assert response.body == {"test": "test"} + assert response.request_info.url == URL("http://mock.url") + + +@pytest.mark.parametrize( + "body, expected", + [ + ([{"test": "test"}, {"test": 
"test"}], {"test": "test"}), + ({"test": "test"}, {"test": "test"}), + ], +) +@pytest.mark.asyncio +async def test_client_get_one(body, expected): + """Test the ClientSession get_one helper.""" + + response = await MockClientSession().get_one("http://mock.url", body=body) + + assert isinstance(response, dict) + assert response == expected + + +@pytest.mark.asyncio +async def test_client_get_json(): + """Test the ClientSession get_json helper.""" + + response = await MockClientSession().get_json("http://mock.url") + + assert isinstance(response, dict) + assert response == {"test": "test"} + + +@pytest.mark.asyncio +async def test_client_content_encoding(): + """Test the ClientSession encode helper.""" + json_data = json.dumps({"test": "test"}, indent=2) + + response = await MockClientSession().get_json( + "http://mock.url", + body=gzip.compress(json_data.encode("utf-8")), + headers={"Content-Encoding": "gzip"}, + ) + + assert isinstance(response, dict) + assert response == {"test": "test"} diff --git a/openbb_platform/core/tests/provider/utils/test_descriptions.py b/openbb_platform/core/tests/provider/utils/test_descriptions.py new file mode 100644 index 0000000000000000000000000000000000000000..df45cee5f8ae658ed469ff5b4ef5f7c0483826d3 --- /dev/null +++ b/openbb_platform/core/tests/provider/utils/test_descriptions.py @@ -0,0 +1,16 @@ +"""Test the provider descriptions.""" + +from openbb_core.provider.utils.descriptions import ( + DATA_DESCRIPTIONS, + QUERY_DESCRIPTIONS, +) + + +def test_query_descriptions(): + """Test the query descriptions.""" + assert QUERY_DESCRIPTIONS + + +def test_data_descriptions(): + """Test the data descriptions.""" + assert DATA_DESCRIPTIONS diff --git a/openbb_platform/core/tests/provider/utils/test_errors.py b/openbb_platform/core/tests/provider/utils/test_errors.py new file mode 100644 index 0000000000000000000000000000000000000000..93c5b7c67ad39cb7998815c3a13b796eb47a8abb --- /dev/null +++ 
b/openbb_platform/core/tests/provider/utils/test_errors.py @@ -0,0 +1,39 @@ +"""Test custom errors.""" + +import pytest +from openbb_core.app.model.abstract.error import OpenBBError +from openbb_core.provider.utils.errors import EmptyDataError + + +def function_that_raises_provider_error(): + """Raise a OpenBBError.""" + raise OpenBBError("An error occurred in the provider.") + + +def function_that_raises_empty_data_error(): + """Raise an EmptyDataError.""" + raise EmptyDataError() + + +def test_provider_error_is_raised(): + """Test if the OpenBBError is raised.""" + with pytest.raises(OpenBBError) as exc_info: + function_that_raises_provider_error() + assert str(exc_info.value) == "An error occurred in the provider." + + +def test_empty_data_error_is_raised(): + """Test if the EmptyDataError is raised.""" + with pytest.raises(EmptyDataError) as exc_info: + function_that_raises_empty_data_error() + assert ( + str(exc_info.value) == "No results found. Try adjusting the query parameters." + ) + + +def test_empty_data_error_custom_message(): + """Test if the EmptyDataError is raised with a custom message.""" + custom_message = "Custom message for no data." 
+ with pytest.raises(EmptyDataError) as exc_info: + raise EmptyDataError(custom_message) + assert str(exc_info.value) == custom_message diff --git a/openbb_platform/core/tests/provider/utils/test_helpers.py b/openbb_platform/core/tests/provider/utils/test_helpers.py new file mode 100644 index 0000000000000000000000000000000000000000..18bcc4b1ac59e4ea0ff46b21ef850bff2607a6a8 --- /dev/null +++ b/openbb_platform/core/tests/provider/utils/test_helpers.py @@ -0,0 +1,143 @@ +"""Test the provider helpers.""" + +import pytest +from openbb_core.provider.utils.client import ClientSession +from openbb_core.provider.utils.helpers import ( + amake_request, + amake_requests, + get_querystring, + get_requests_session, + make_request, + to_snake_case, +) + +# pylint: disable=unused-argument + + +class MockResponse: + """Mock the response.""" + + def __init__(self): + """Initialize the mock response.""" + self.status_code = 200 + self.status = 200 + + async def json(self): + """Return the json response.""" + return {"test": "test"} + + +class MockSession: + """Mock the ClientSession.""" + + def __init__(self): + """Initialize the mock session.""" + self.response = MockResponse() + + async def request(self, *args, **kwargs): # pylint: disable=unused-argument + """Mock the ClientSession.request method.""" + if kwargs.get("raise_for_status", False): + raise Exception("Test") + + return self.response + + @staticmethod + async def mock_callback(response, session): + """Mock the response_callback.""" + assert response.status == 200 + return await response.json() + + +def test_get_querystring_exclude(): + """Test the get_querystring helper.""" + items = { + "key1": "value1", + "key2": "value2", + "key3": None, + "key4": ["value3", "value4"], + } + exclude = ["key2"] + + querystring = get_querystring(items, exclude) + assert querystring == "key1=value1&key4=value3&key4=value4" + + +def test_get_querystring_no_exclude(): + """Test the get_querystring helper with no exclude list.""" + items 
= { + "key1": "value1", + "key2": "value2", + "key3": None, + "key4": ["value3", "value4"], + } + + querystring = get_querystring(items, []) + assert querystring == "key1=value1&key2=value2&key4=value3&key4=value4" + + +def test_make_request(monkeypatch): + """Test the make_request helper.""" + + def mock_get(*args, **kwargs): + """Mock the requests.get method.""" + return MockResponse() + + client_session = get_requests_session() + monkeypatch.setattr(client_session, "get", mock_get) + + response = make_request("http://mock.url", session=client_session) + assert response.status_code == 200 + + with pytest.raises(ValueError): + make_request("http://mock.url", method="PUT") + + +def test_to_snake_case(): + """Test the to_snake_case helper.""" + assert to_snake_case("SomeRandomString") == "some_random_string" + assert to_snake_case("someRandomString") == "some_random_string" + assert to_snake_case("already_snake_case") == "already_snake_case" + + +@pytest.mark.asyncio +async def test_amake_request(monkeypatch): + """Test the amake_request helper.""" + + mock_callback = MockSession.mock_callback + + client_session = MockSession() + monkeypatch.setattr(ClientSession, "request", client_session.request) + + response = await amake_request("http://mock.url", response_callback=mock_callback) + assert response == {"test": "test"} + + with pytest.raises(Exception): + await amake_request( + "http://mock.url", + response_callback=mock_callback, + raise_for_status=True, + ) + + with pytest.raises(ValueError): + await amake_request("http://mock.url", method="PUT") # type: ignore[arg-type] + + +@pytest.mark.asyncio +async def test_amake_requests(monkeypatch): + """Test the amake_requests helper.""" + + mock_callback = MockSession.mock_callback + + client_session = MockSession() + monkeypatch.setattr(ClientSession, "request", client_session.request) + + multi_response = await amake_requests( + ["http://mock.url", "http://mock.url"], + response_callback=mock_callback, + ) + assert 
multi_response == [{"test": "test"}, {"test": "test"}] + + with pytest.raises(ValueError): + await amake_requests( + ["http://mock.url", "http://mock.url"], method="PUT", raise_for_status=True + )