| sample_id | text | metadata | category |
|---|---|---|---|
home-assistant/core:homeassistant/components/droplet/sensor.py | """Support for Droplet."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
from pydroplet.droplet import Droplet
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import EntityCategory, UnitOfVolume, UnitOfVolumeFlowRate
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import (
DOMAIN,
KEY_CURRENT_FLOW_RATE,
KEY_SERVER_CONNECTIVITY,
KEY_SIGNAL_QUALITY,
KEY_VOLUME,
)
from .coordinator import DropletConfigEntry, DropletDataCoordinator
ML_L_CONVERSION = 1000
@dataclass(kw_only=True, frozen=True)
class DropletSensorEntityDescription(SensorEntityDescription):
    """Describes Droplet sensor entity."""

    # Extracts this sensor's native value from the Droplet device object.
    value_fn: Callable[[Droplet], float | str | None]
    # Returns the last-reset timestamp (used by TOTAL-state sensors); the
    # default returns None, i.e. the sensor has no last_reset.
    last_reset_fn: Callable[[Droplet], datetime | None] = lambda _: None
# Static descriptions for every sensor entity exposed by a Droplet device.
SENSORS: list[DropletSensorEntityDescription] = [
    # Instantaneous water flow rate; device reports liters/minute.
    DropletSensorEntityDescription(
        key=KEY_CURRENT_FLOW_RATE,
        translation_key=KEY_CURRENT_FLOW_RATE,
        device_class=SensorDeviceClass.VOLUME_FLOW_RATE,
        native_unit_of_measurement=UnitOfVolumeFlowRate.LITERS_PER_MINUTE,
        suggested_unit_of_measurement=UnitOfVolumeFlowRate.GALLONS_PER_MINUTE,
        suggested_display_precision=2,
        state_class=SensorStateClass.MEASUREMENT,
        value_fn=lambda device: device.get_flow_rate(),
    ),
    # Accumulated water volume since the last fetch; device reports mL,
    # converted to liters here. TOTAL state class with a last_reset anchor.
    DropletSensorEntityDescription(
        key=KEY_VOLUME,
        device_class=SensorDeviceClass.WATER,
        native_unit_of_measurement=UnitOfVolume.LITERS,
        suggested_unit_of_measurement=UnitOfVolume.GALLONS,
        suggested_display_precision=2,
        state_class=SensorStateClass.TOTAL,
        value_fn=lambda device: device.get_volume_delta() / ML_L_CONVERSION,
        last_reset_fn=lambda device: device.get_volume_last_fetched(),
    ),
    # Diagnostic enum: connectivity of the device to the Droplet server.
    DropletSensorEntityDescription(
        key=KEY_SERVER_CONNECTIVITY,
        translation_key=KEY_SERVER_CONNECTIVITY,
        device_class=SensorDeviceClass.ENUM,
        options=["connected", "connecting", "disconnected"],
        value_fn=lambda device: device.get_server_status(),
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
    # Diagnostic enum: quality of the device's signal.
    DropletSensorEntityDescription(
        key=KEY_SIGNAL_QUALITY,
        translation_key=KEY_SIGNAL_QUALITY,
        device_class=SensorDeviceClass.ENUM,
        options=["no_signal", "weak_signal", "strong_signal"],
        value_fn=lambda device: device.get_signal_quality(),
        entity_category=EntityCategory.DIAGNOSTIC,
    ),
]
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: DropletConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Droplet sensors from config entry."""
    data_coordinator = config_entry.runtime_data
    entities = [
        DropletSensor(data_coordinator, description) for description in SENSORS
    ]
    async_add_entities(entities)
class DropletSensor(CoordinatorEntity[DropletDataCoordinator], SensorEntity):
    """Representation of a Droplet."""

    entity_description: DropletSensorEntityDescription
    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: DropletDataCoordinator,
        entity_description: DropletSensorEntityDescription,
    ) -> None:
        """Initialize the sensor."""
        super().__init__(coordinator)
        self.entity_description = entity_description
        # One unique id per (config entry, sensor key) pair.
        self._attr_unique_id = (
            f"{coordinator.config_entry.unique_id}_{entity_description.key}"
        )
        device = coordinator.droplet
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, coordinator.unique_id)},
            manufacturer=device.get_manufacturer(),
            model=device.get_model(),
            sw_version=device.get_fw_version(),
            serial_number=device.get_sn(),
        )

    @property
    def available(self) -> bool:
        """Get Droplet's availability."""
        return self.coordinator.get_availability()

    @property
    def native_value(self) -> float | str | None:
        """Return the value reported by the sensor."""
        # Delegate to the per-sensor extractor from the entity description.
        return self.entity_description.value_fn(self.coordinator.droplet)

    @property
    def last_reset(self) -> datetime | None:
        """Return the last reset of the sensor, if applicable."""
        return self.entity_description.last_reset_fn(self.coordinator.droplet)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/droplet/sensor.py",
"license": "Apache License 2.0",
"lines": 110,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/ekeybionyx/application_credentials.py | """application_credentials platform the Ekey Bionyx integration."""
from homeassistant.components.application_credentials import AuthorizationServer
from homeassistant.core import HomeAssistant
from .const import OAUTH2_AUTHORIZE, OAUTH2_TOKEN
async def async_get_authorization_server(hass: HomeAssistant) -> AuthorizationServer:
    """Return the OAuth2 authorization server for ekey bionyx."""
    server = AuthorizationServer(
        authorize_url=OAUTH2_AUTHORIZE,
        token_url=OAUTH2_TOKEN,
    )
    return server
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/ekeybionyx/application_credentials.py",
"license": "Apache License 2.0",
"lines": 10,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/ekeybionyx/config_flow.py | """Config flow for ekey bionyx."""
import asyncio
import json
import logging
import re
import secrets
from typing import Any, NotRequired, TypedDict
import aiohttp
import ekey_bionyxpy
import voluptuous as vol
from homeassistant.components.webhook import (
async_generate_id as webhook_generate_id,
async_generate_path as webhook_generate_path,
)
from homeassistant.config_entries import ConfigFlowResult
from homeassistant.const import CONF_TOKEN, CONF_URL
from homeassistant.helpers import config_entry_oauth2_flow, config_validation as cv
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.network import get_url
from homeassistant.helpers.selector import SelectOptionDict, SelectSelector
from .const import API_URL, DOMAIN, INTEGRATION_NAME, SCOPE
# Valid webhook name: must not begin with a digit or whitespace, may contain
# word characters, spaces and dots, and must end with a word character
# (so no trailing space or dot).
# NOTE(review): the pattern also admits a leading dot (e.g. ".a" matches,
# since the lookahead only rejects digits/whitespace) — confirm whether names
# are really required to start with a letter or underscore.
VALID_NAME_PATTERN = re.compile(r"^(?![\d\s])[\w\d \.]*[\w\d]$")
class ConfigFlowEkeyApi(ekey_bionyxpy.AbstractAuth):
    """ekey bionyx authentication before a ConfigEntry exists.

    This implementation directly provides the token without supporting refresh.
    """

    def __init__(
        self,
        websession: aiohttp.ClientSession,
        token: dict[str, Any],
    ) -> None:
        """Initialize ConfigFlowEkeyApi.

        token is the raw OAuth2 token dict produced by the OAuth flow; only
        its "access_token" entry is used.
        """
        super().__init__(websession, API_URL)
        self._token = token

    async def async_get_access_token(self) -> str:
        """Return the token for the Ekey API."""
        # No expiry/refresh handling: the flow is expected to finish before
        # the access token expires.
        return self._token["access_token"]
class EkeyFlowData(TypedDict):
    """Type for Flow Data.

    All keys are optional because they are filled in progressively as the
    config flow advances.
    """

    # API client created after OAuth completes.
    api: NotRequired[ekey_bionyxpy.BionyxAPI]
    # The system selected for this config entry.
    system: NotRequired[ekey_bionyxpy.System]
    # All systems owned by the authenticated account.
    systems: NotRequired[list[ekey_bionyxpy.System]]
class OAuth2FlowHandler(
    config_entry_oauth2_flow.AbstractOAuth2FlowHandler, domain=DOMAIN
):
    """Config flow to handle ekey bionyx OAuth2 authentication.

    Flow: OAuth -> pick system (skipped for a single system) -> check webhook
    quotas -> optionally delete stale webhooks and wait for the deletion to
    propagate -> register new webhooks -> create the config entry.
    """

    DOMAIN = DOMAIN
    # Background task polling for webhook deletion; shared across progress
    # steps of this flow instance.
    check_deletion_task: asyncio.Task[None] | None = None

    def __init__(self) -> None:
        """Initialize OAuth2FlowHandler."""
        super().__init__()
        self._data: EkeyFlowData = {}

    @property
    def logger(self) -> logging.Logger:
        """Return logger."""
        return logging.getLogger(__name__)

    @property
    def extra_authorize_data(self) -> dict[str, Any]:
        """Extra data that needs to be appended to the authorize url."""
        return {"scope": SCOPE}

    async def async_oauth_create_entry(self, data: dict[str, Any]) -> ConfigFlowResult:
        """Start the user facing flow by initializing the API and getting the systems."""
        # Wrap the fresh OAuth token in a non-refreshing auth shim; a proper
        # refreshing implementation is only available once the entry exists.
        client = ConfigFlowEkeyApi(async_get_clientsession(self.hass), data[CONF_TOKEN])
        ap = ekey_bionyxpy.BionyxAPI(client)
        self._data["api"] = ap
        try:
            system_res = await ap.get_systems()
        except aiohttp.ClientResponseError:
            return self.async_abort(
                reason="cannot_connect",
                description_placeholders={"ekeybionyx": INTEGRATION_NAME},
            )
        # Only systems owned by this account can have webhooks managed.
        system = [s for s in system_res if s.own_system]
        if len(system) == 0:
            return self.async_abort(reason="no_own_systems")
        self._data["systems"] = system
        if len(system) == 1:
            # skipping choose_system since there is only one
            self._data["system"] = system[0]
            return await self.async_step_check_system(user_input=None)
        return await self.async_step_choose_system(user_input=None)

    async def async_step_choose_system(
        self, user_input: dict[str, Any] | None
    ) -> ConfigFlowResult:
        """Dialog to choose System if multiple systems are present."""
        if user_input is None:
            options: list[SelectOptionDict] = [
                {"value": s.system_id, "label": s.system_name}
                for s in self._data["systems"]
            ]
            data_schema = {vol.Required("system"): SelectSelector({"options": options})}
            return self.async_show_form(
                step_id="choose_system",
                data_schema=vol.Schema(data_schema),
                description_placeholders={"ekeybionyx": INTEGRATION_NAME},
            )
        # Resolve the selected id back to the System object.
        self._data["system"] = [
            s for s in self._data["systems"] if s.system_id == user_input["system"]
        ][0]
        return await self.async_step_check_system(user_input=None)

    async def async_step_check_system(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Check if system has open webhooks."""
        system = self._data["system"]
        # One config entry per ekey system.
        await self.async_set_unique_id(system.system_id)
        self._abort_if_unique_id_configured()
        # No quota free and none used: the system cannot take webhooks at all.
        if (
            system.function_webhook_quotas["free"] == 0
            and system.function_webhook_quotas["used"] == 0
        ):
            return self.async_abort(
                reason="no_available_webhooks",
                description_placeholders={"ekeybionyx": INTEGRATION_NAME},
            )
        # Existing webhooks must be removed before registering new ones.
        if system.function_webhook_quotas["used"] > 0:
            return await self.async_step_delete_webhooks()
        return await self.async_step_webhooks(user_input=None)

    async def async_step_webhooks(
        self, user_input: dict[str, Any] | None
    ) -> ConfigFlowResult:
        """Dialog to setup webhooks."""
        system = self._data["system"]
        errors: dict[str, str] | None = None
        if user_input is not None:
            errors = {}
            # Validate every webhook name field (all keys except the URL).
            for key, webhook_name in user_input.items():
                if key == CONF_URL:
                    continue
                if not re.match(VALID_NAME_PATTERN, webhook_name):
                    errors.update({key: "invalid_name"})
            try:
                cv.url(user_input[CONF_URL])
            except vol.Invalid:
                errors[CONF_URL] = "invalid_url"
            # Only the URL submitted -> at least one webhook name is required.
            if set(user_input) == {CONF_URL}:
                errors["base"] = "no_webhooks_provided"
            if not errors:
                # Generate one local record per requested webhook: a shared
                # secret, the display name, and a fresh HA webhook id.
                webhook_data = [
                    {
                        "auth": secrets.token_hex(32),
                        "name": webhook_name,
                        "webhook_id": webhook_generate_id(),
                    }
                    for key, webhook_name in user_input.items()
                    if key != CONF_URL
                ]
                for webhook in webhook_data:
                    # The secret travels in the POST body; the event platform
                    # compares it on every delivery.
                    wh_def: ekey_bionyxpy.WebhookData = {
                        "integrationName": "Home Assistant",
                        "functionName": webhook["name"],
                        "locationName": "Home Assistant",
                        "definition": {
                            "url": user_input[CONF_URL]
                            + webhook_generate_path(webhook["webhook_id"]),
                            "authentication": {"apiAuthenticationType": "None"},
                            "securityLevel": "AllowHttp",
                            "method": "Post",
                            "body": {
                                "contentType": "application/json",
                                "content": json.dumps({"auth": webhook["auth"]}),
                            },
                        },
                    }
                    # Remember the id assigned by the ekey API.
                    webhook["ekey_id"] = (await system.add_webhook(wh_def)).webhook_id
                return self.async_create_entry(
                    title=self._data["system"].system_name,
                    data={"webhooks": webhook_data},
                )
        # Build the form: one optional name field per free quota slot plus the
        # required base URL (prefilled with this HA instance's URL).
        data_schema: dict[Any, Any] = {
            vol.Optional(f"webhook{i + 1}"): vol.All(str, vol.Length(max=50))
            for i in range(self._data["system"].function_webhook_quotas["free"])
        }
        data_schema[vol.Required(CONF_URL)] = str
        return self.async_show_form(
            step_id="webhooks",
            data_schema=self.add_suggested_values_to_schema(
                vol.Schema(data_schema),
                {
                    CONF_URL: get_url(
                        self.hass,
                        allow_ip=True,
                        prefer_external=False,
                    )
                }
                # Re-suggest whatever the user already typed on an error.
                | (user_input or {}),
            ),
            errors=errors,
            description_placeholders={
                "webhooks_available": str(
                    self._data["system"].function_webhook_quotas["free"]
                ),
                "ekeybionyx": INTEGRATION_NAME,
            },
        )

    async def async_step_delete_webhooks(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Form to delete Webhooks."""
        if user_input is None:
            # Show a confirmation form before destroying existing webhooks.
            return self.async_show_form(step_id="delete_webhooks")
        for webhook in await self._data["system"].get_webhooks():
            await webhook.delete()
        return await self.async_step_wait_for_deletion(user_input=None)

    async def async_step_wait_for_deletion(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Wait for webhooks to be deleted in another flow."""
        uncompleted_task: asyncio.Task[None] | None = None
        # Start the polling task once; subsequent invocations of this step
        # just re-check whether it finished.
        if not self.check_deletion_task:
            self.check_deletion_task = self.hass.async_create_task(
                self.async_check_deletion_status()
            )
        if not self.check_deletion_task.done():
            progress_action = "check_deletion_status"
            uncompleted_task = self.check_deletion_task
        if uncompleted_task:
            return self.async_show_progress(
                step_id="wait_for_deletion",
                description_placeholders={"ekeybionyx": INTEGRATION_NAME},
                progress_action=progress_action,
                progress_task=uncompleted_task,
            )
        self.check_deletion_task = None
        return self.async_show_progress_done(next_step_id="webhooks")

    async def async_check_deletion_status(self) -> None:
        """Check if webhooks have been deleted."""
        # Poll the API every 5 s until the used-webhook count drops to zero,
        # refreshing the cached system object on each pass.
        while True:
            self._data["systems"] = await self._data["api"].get_systems()
            self._data["system"] = [
                s
                for s in self._data["systems"]
                if s.system_id == self._data["system"].system_id
            ][0]
            if self._data["system"].function_webhook_quotas["used"] == 0:
                break
            await asyncio.sleep(5)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/ekeybionyx/config_flow.py",
"license": "Apache License 2.0",
"lines": 238,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/ekeybionyx/const.py | """Constants for the Ekey Bionyx integration."""
import logging

DOMAIN = "ekeybionyx"
INTEGRATION_NAME = "ekey bionyx"
# Package-wide logger shared by the integration's modules.
LOGGER = logging.getLogger(__package__)
# Azure AD B2C endpoints used for the ekey bionyx OAuth2 sign-in flow.
OAUTH2_AUTHORIZE = "https://ekeybionyxprod.b2clogin.com/ekeybionyxprod.onmicrosoft.com/B2C_1_sign_in_v2/oauth2/v2.0/authorize"
OAUTH2_TOKEN = "https://ekeybionyxprod.b2clogin.com/ekeybionyxprod.onmicrosoft.com/B2C_1_sign_in_v2/oauth2/v2.0/token"
# Base URL of the ekey bionyx third-party REST API.
API_URL = "https://api.bionyx.io/3rd-party/api"
# OAuth2 scope granting access to that API.
SCOPE = "https://ekeybionyxprod.onmicrosoft.com/3rd-party-api/api-access"
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/ekeybionyx/const.py",
"license": "Apache License 2.0",
"lines": 9,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/ekeybionyx/event.py | """Event platform for ekey bionyx integration."""
from http import HTTPStatus
from aiohttp.hdrs import METH_POST
from aiohttp.web import Request, Response
from homeassistant.components.event import EventDeviceClass, EventEntity
from homeassistant.components.webhook import (
async_register as webhook_register,
async_unregister as webhook_unregister,
)
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import EkeyBionyxConfigEntry
from .const import DOMAIN
async def async_setup_entry(
    hass: HomeAssistant,
    entry: EkeyBionyxConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Ekey event."""
    # One event entity per webhook record stored by the config flow.
    entities = [EkeyEvent(webhook) for webhook in entry.data["webhooks"]]
    async_add_entities(entities)
class EkeyEvent(EventEntity):
    """Ekey Event."""

    _attr_device_class = EventDeviceClass.BUTTON
    _attr_event_types = ["event happened"]

    def __init__(
        self,
        data: dict[str, str],
    ) -> None:
        """Initialise a Ekey event entity.

        data is one webhook record from the config entry: display name, the
        id assigned by the ekey API (used as unique id), the HA webhook id,
        and the shared auth secret.
        """
        self._attr_name = data["name"]
        self._attr_unique_id = data["ekey_id"]
        self._webhook_id = data["webhook_id"]
        self._auth = data["auth"]

    @callback
    def _async_handle_event(self) -> None:
        """Handle the webhook event."""
        self._trigger_event("event happened")
        self.async_write_ha_state()

    async def async_added_to_hass(self) -> None:
        """Register callbacks with your device API/library."""

        async def async_webhook_handler(
            hass: HomeAssistant, webhook_id: str, request: Request
        ) -> Response | None:
            # Reject bodies that are not valid JSON.
            try:
                payload = await request.json()
            except ValueError:
                return Response(status=HTTPStatus.BAD_REQUEST)
            # The ekey webhook carries the shared secret in the body; only
            # fire the event when it matches this entity's secret.
            auth = payload.get("auth")
            if auth is None:
                return Response(status=HTTPStatus.BAD_REQUEST)
            if auth != self._auth:
                return Response(status=HTTPStatus.UNAUTHORIZED)
            self._async_handle_event()
            return None

        webhook_register(
            self.hass,
            DOMAIN,
            f"Ekey {self._attr_name}",
            self._webhook_id,
            async_webhook_handler,
            allowed_methods=[METH_POST],
        )

    async def async_will_remove_from_hass(self) -> None:
        """Unregister Webhook."""
        webhook_unregister(self.hass, self._webhook_id)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/ekeybionyx/event.py",
"license": "Apache License 2.0",
"lines": 65,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/file/services.py | """File Service calls."""
from collections.abc import Callable
import json
import voluptuous as vol
import yaml
from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse, callback
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
from homeassistant.helpers import config_validation as cv
from .const import ATTR_FILE_ENCODING, ATTR_FILE_NAME, DOMAIN, SERVICE_READ_FILE
@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Register services for File integration."""
    # Both fields are mandatory: the path to read and the decoder to apply.
    read_file_schema = vol.Schema(
        {
            vol.Required(ATTR_FILE_NAME): cv.string,
            vol.Required(ATTR_FILE_ENCODING): cv.string,
        }
    )
    hass.services.async_register(
        DOMAIN,
        SERVICE_READ_FILE,
        read_file,
        schema=read_file_schema,
        supports_response=SupportsResponse.ONLY,
    )
# Supported file encodings, mapped to (parser, exception-to-catch) pairs.
ENCODING_LOADERS: dict[str, tuple[Callable, type[Exception]]] = {
    "json": (json.loads, json.JSONDecodeError),
    "yaml": (yaml.safe_load, yaml.YAMLError),
}
def read_file(call: ServiceCall) -> dict:
    """Handle read_file service call.

    Validates path access and encoding, reads the file as UTF-8 text,
    decodes it with the matching parser and returns {"data": parsed}.
    """
    path = call.data[ATTR_FILE_NAME]
    encoding = call.data[ATTR_FILE_ENCODING].lower()

    # Refuse paths outside the configured allowlist.
    if not call.hass.config.is_allowed_path(path):
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="no_access_to_path",
            translation_placeholders={"filename": path},
        )
    if encoding not in ENCODING_LOADERS:
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="unsupported_file_encoding",
            translation_placeholders={
                "filename": path,
                "encoding": encoding,
            },
        )

    try:
        with open(path, encoding="utf-8") as file:
            raw_content = file.read()
    except FileNotFoundError as err:
        raise ServiceValidationError(
            translation_domain=DOMAIN,
            translation_key="file_not_found",
            translation_placeholders={"filename": path},
        ) from err
    except OSError as err:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="file_read_error",
            translation_placeholders={"filename": path},
        ) from err

    parse, parse_error = ENCODING_LOADERS[encoding]
    try:
        parsed = parse(raw_content)
    except parse_error as err:
        raise HomeAssistantError(
            translation_domain=DOMAIN,
            translation_key="file_decoding",
            translation_placeholders={"filename": path, "encoding": encoding},
        ) from err
    return {"data": parsed}
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/file/services.py",
"license": "Apache License 2.0",
"lines": 72,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/foscam/number.py | """Foscam number platform for Home Assistant."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from typing import Any
from libpyfoscamcgi import FoscamCamera
from homeassistant.components.number import NumberEntity, NumberEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import FoscamConfigEntry, FoscamCoordinator
from .entity import FoscamEntity
@dataclass(frozen=True, kw_only=True)
class FoscamNumberEntityDescription(NumberEntityDescription):
    """A custom entity description with adjustable features."""

    # Reads the current value from coordinator data.
    native_value_fn: Callable[[FoscamCoordinator], int]
    # Pushes a new value to the camera session (runs in the executor).
    set_value_fn: Callable[[FoscamCamera, float], Any]
    # Whether this entity applies to the connected camera; defaults to
    # always-true, so the entity is created unconditionally.
    exists_fn: Callable[[FoscamCoordinator], bool] = lambda _: True
# Volume controls exposed as number entities (0-100, step 1).
NUMBER_DESCRIPTIONS: list[FoscamNumberEntityDescription] = [
    FoscamNumberEntityDescription(
        key="device_volume",
        translation_key="device_volume",
        native_min_value=0,
        native_max_value=100,
        native_step=1,
        native_value_fn=lambda coordinator: coordinator.data.device_volume,
        set_value_fn=lambda session, value: session.setAudioVolume(value),
    ),
    # Speaker volume is only available on cameras that support adjustment.
    FoscamNumberEntityDescription(
        key="speak_volume",
        translation_key="speak_volume",
        native_min_value=0,
        native_max_value=100,
        native_step=1,
        native_value_fn=lambda coordinator: coordinator.data.speak_volume,
        set_value_fn=lambda session, value: session.setSpeakVolume(value),
        exists_fn=lambda coordinator: coordinator.data.supports_speak_volume_adjustment,
    ),
]
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: FoscamConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up foscam number from a config entry.

    Creates one number entity per description whose exists_fn reports the
    feature as supported by this camera.
    """
    coordinator = config_entry.runtime_data
    # exists_fn is a non-optional field defaulting to `lambda _: True`, so
    # the previous `exists_fn is None` guard was unreachable; call directly.
    async_add_entities(
        FoscamVolumeNumberEntity(coordinator, description)
        for description in NUMBER_DESCRIPTIONS
        if description.exists_fn(coordinator)
    )
class FoscamVolumeNumberEntity(FoscamEntity, NumberEntity):
    """Representation of a Foscam Smart AI number entity."""

    entity_description: FoscamNumberEntityDescription

    def __init__(
        self,
        coordinator: FoscamCoordinator,
        description: FoscamNumberEntityDescription,
    ) -> None:
        """Initialize the data."""
        config_entry_id = coordinator.config_entry.entry_id
        super().__init__(coordinator, config_entry_id)
        self.entity_description = description
        self._attr_unique_id = f"{config_entry_id}_{description.key}"

    @property
    def native_value(self) -> float:
        """Return the current value."""
        return self.entity_description.native_value_fn(self.coordinator)

    async def async_set_native_value(self, value: float) -> None:
        """Set the value."""
        # The camera library is synchronous; push the write to the executor,
        # then refresh coordinator data so the new value is reflected.
        setter = self.entity_description.set_value_fn
        await self.hass.async_add_executor_job(
            setter, self.coordinator.session, value
        )
        await self.coordinator.async_request_refresh()
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/foscam/number.py",
"license": "Apache License 2.0",
"lines": 73,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/geocaching/entity.py | """Sensor entities for Geocaching."""
from typing import cast
from geocachingapi.models import GeocachingCache
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import GeocachingDataUpdateCoordinator
# Base class for all platforms
class GeocachingBaseEntity(CoordinatorEntity[GeocachingDataUpdateCoordinator]):
    """Base class for Geocaching sensors."""

    # All Geocaching entities use the device name as name prefix.
    _attr_has_entity_name = True
# Base class for cache entities
class GeocachingCacheEntity(GeocachingBaseEntity):
    """Base class for Geocaching cache entities."""

    def __init__(
        self, coordinator: GeocachingDataUpdateCoordinator, cache: GeocachingCache
    ) -> None:
        """Initialize the Geocaching cache entity."""
        super().__init__(coordinator)
        self.cache = cache
        # Group all entities that belong to one cache under a single service
        # device, identified by the cache's reference code.
        reference_code = cast(str, cache.reference_code)
        self._attr_device_info = DeviceInfo(
            name=f"Geocache {cache.name}",
            identifiers={(DOMAIN, reference_code)},
            entry_type=DeviceEntryType.SERVICE,
            manufacturer=cache.owner.username,
        )
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/geocaching/entity.py",
"license": "Apache License 2.0",
"lines": 28,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/hassio/switch.py | """Switch platform for Hass.io addons."""
from __future__ import annotations
import logging
from typing import Any
from aiohasupervisor import SupervisorError
from homeassistant.components.switch import SwitchEntity, SwitchEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_ICON
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import ADDONS_COORDINATOR, ATTR_STARTED, ATTR_STATE, DATA_KEY_ADDONS
from .entity import HassioAddonEntity
from .handler import get_supervisor_client
_LOGGER = logging.getLogger(__name__)
# Single description shared by every add-on switch; disabled by default in
# the entity registry, and unnamed so the device (add-on) name is used.
ENTITY_DESCRIPTION = SwitchEntityDescription(
    key=ATTR_STATE,
    name=None,
    icon="mdi:puzzle",
    entity_registry_enabled_default=False,
)
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Switch set up for Hass.io config entry."""
    coordinator = hass.data[ADDONS_COORDINATOR]
    # One switch per installed add-on known to the coordinator.
    switches = [
        HassioAddonSwitch(
            addon=addon,
            coordinator=coordinator,
            entity_description=ENTITY_DESCRIPTION,
        )
        for addon in coordinator.data[DATA_KEY_ADDONS].values()
    ]
    async_add_entities(switches)
class HassioAddonSwitch(HassioAddonEntity, SwitchEntity):
    """Switch for Hass.io add-ons."""

    @property
    def is_on(self) -> bool | None:
        """Return true if the add-on is on."""
        addon_data = self.coordinator.data[DATA_KEY_ADDONS].get(self._addon_slug, {})
        state = addon_data.get(self.entity_description.key)
        return state == ATTR_STARTED

    @property
    def entity_picture(self) -> str | None:
        """Return the icon of the add-on if any."""
        if not self.available:
            return None
        addon_data = self.coordinator.data[DATA_KEY_ADDONS].get(self._addon_slug, {})
        if addon_data.get(ATTR_ICON):
            return f"/api/hassio/addons/{self._addon_slug}/icon"
        return None

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the entity on.

        Raises HomeAssistantError if the Supervisor reports a failure.
        """
        supervisor_client = get_supervisor_client(self.hass)
        try:
            await supervisor_client.addons.start_addon(self._addon_slug)
        except SupervisorError as err:
            # Log before raising, for consistency with async_turn_off.
            _LOGGER.error("Failed to start addon %s: %s", self._addon_slug, err)
            raise HomeAssistantError(err) from err
        # Refresh so the switch state reflects the new add-on state promptly.
        await self.coordinator.force_addon_info_data_refresh(self._addon_slug)

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the entity off.

        Raises HomeAssistantError if the Supervisor reports a failure.
        """
        supervisor_client = get_supervisor_client(self.hass)
        try:
            await supervisor_client.addons.stop_addon(self._addon_slug)
        except SupervisorError as err:
            _LOGGER.error("Failed to stop addon %s: %s", self._addon_slug, err)
            raise HomeAssistantError(err) from err
        await self.coordinator.force_addon_info_data_refresh(self._addon_slug)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/hassio/switch.py",
"license": "Apache License 2.0",
"lines": 70,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/history_stats/diagnostics.py | """Diagnostics support for history_stats."""
from __future__ import annotations
from typing import Any
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, config_entry: ConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for a config entry."""
    entity_registry = er.async_get(hass)
    entries = entity_registry.entities.get_entries_for_config_entry_id(
        config_entry.entry_id
    )
    diagnostics: dict[str, Any] = {
        "config_entry": config_entry.as_dict(),
        "entity": [entry.extended_dict for entry in entries],
    }
    return diagnostics
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/history_stats/diagnostics.py",
"license": "Apache License 2.0",
"lines": 16,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/homeassistant_connect_zbt2/config_flow.py | """Config flow for the Home Assistant Connect ZBT-2 integration."""
from __future__ import annotations
import logging
from typing import TYPE_CHECKING, Any, Protocol
from homeassistant.components import usb
from homeassistant.components.homeassistant_hardware import firmware_config_flow
from homeassistant.components.homeassistant_hardware.helpers import (
HardwareFirmwareDiscoveryInfo,
)
from homeassistant.components.homeassistant_hardware.util import (
ApplicationType,
FirmwareInfo,
ResetTarget,
)
from homeassistant.components.usb import (
usb_service_info_from_device,
usb_unique_id_from_service_info,
)
from homeassistant.config_entries import (
ConfigEntry,
ConfigEntryBaseFlow,
ConfigFlowContext,
ConfigFlowResult,
OptionsFlow,
)
from homeassistant.core import callback
from homeassistant.helpers.service_info.usb import UsbServiceInfo
from .const import (
DEVICE,
DOMAIN,
FIRMWARE,
FIRMWARE_VERSION,
HARDWARE_NAME,
MANUFACTURER,
NABU_CASA_FIRMWARE_RELEASES_URL,
PID,
PRODUCT,
SERIAL_NUMBER,
VID,
)
from .util import get_usb_service_info
_LOGGER = logging.getLogger(__name__)
if TYPE_CHECKING:
    # Static-analysis-only protocol: describes the methods the firmware
    # mixin expects its sibling base class to provide.
    class FirmwareInstallFlowProtocol(Protocol):
        """Protocol describing `BaseFirmwareInstallFlow` for a mixin."""

        def _get_translation_placeholders(self) -> dict[str, str]:
            return {}

        async def _install_firmware_step(
            self,
            fw_update_url: str,
            fw_type: str,
            firmware_name: str,
            expected_installed_firmware_type: ApplicationType,
            step_id: str,
            next_step_id: str,
        ) -> ConfigFlowResult: ...

else:
    # Multiple inheritance with `Protocol` seems to break at runtime, so the
    # protocol is replaced with a plain `object` base outside type checking.
    FirmwareInstallFlowProtocol = object
class ZBT2FirmwareMixin(ConfigEntryBaseFlow, FirmwareInstallFlowProtocol):
    """Mixin for Home Assistant Connect ZBT-2 firmware methods.

    Provides the ZBT-2-specific constants and the Zigbee/Thread firmware
    installation steps shared by the config and options flows.
    """

    context: ConfigFlowContext

    ZIGBEE_BAUDRATE = 460800
    # Early ZBT-2 samples used RTS/DTR to trigger the bootloader, later ones use the
    # baudrate method. Since the two are mutually exclusive we just use both.
    BOOTLOADER_RESET_METHODS = [ResetTarget.RTS_DTR, ResetTarget.BAUDRATE]
    # (application type, baudrate) pairs tried when probing the radio.
    APPLICATION_PROBE_METHODS = [
        (ApplicationType.GECKO_BOOTLOADER, 115200),
        (ApplicationType.EZSP, ZIGBEE_BAUDRATE),
        (ApplicationType.SPINEL, 460800),
    ]

    async def async_step_install_zigbee_firmware(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Install Zigbee firmware."""
        return await self._install_firmware_step(
            fw_update_url=NABU_CASA_FIRMWARE_RELEASES_URL,
            fw_type="zbt2_zigbee_ncp",
            firmware_name="Zigbee",
            expected_installed_firmware_type=ApplicationType.EZSP,
            step_id="install_zigbee_firmware",
            next_step_id="pre_confirm_zigbee",
        )

    async def async_step_install_thread_firmware(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Install Thread firmware."""
        return await self._install_firmware_step(
            fw_update_url=NABU_CASA_FIRMWARE_RELEASES_URL,
            fw_type="zbt2_openthread_rcp",
            firmware_name="OpenThread",
            expected_installed_firmware_type=ApplicationType.SPINEL,
            step_id="install_thread_firmware",
            next_step_id="finish_thread_installation",
        )
class HomeAssistantConnectZBT2ConfigFlow(
    ZBT2FirmwareMixin,
    firmware_config_flow.BaseFirmwareConfigFlow,
    domain=DOMAIN,
):
    """Handle a config flow for Home Assistant Connect ZBT-2."""

    VERSION = 1
    MINOR_VERSION = 1

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        """Initialize the config flow."""
        super().__init__(*args, **kwargs)
        # USB details of the discovered stick; set by the usb/import steps.
        self._usb_info: UsbServiceInfo | None = None

    @staticmethod
    @callback
    def async_get_options_flow(
        config_entry: ConfigEntry,
    ) -> OptionsFlow:
        """Return the options flow."""
        return HomeAssistantConnectZBT2OptionsFlowHandler(config_entry)

    async def async_step_usb(self, discovery_info: UsbServiceInfo) -> ConfigFlowResult:
        """Handle usb discovery."""
        unique_id = usb_unique_id_from_service_info(discovery_info)
        # Resolve to the stable /dev/serial/by-id path; blocking call, so it
        # runs in the executor.
        discovery_info.device = await self.hass.async_add_executor_job(
            usb.get_serial_by_id, discovery_info.device
        )
        # NOTE(review): the abort/update check sits in `finally`, so it runs
        # even if setting the unique id raises — confirm this is intentional.
        try:
            await self.async_set_unique_id(unique_id)
        finally:
            self._abort_if_unique_id_configured(updates={DEVICE: discovery_info.device})
        self._usb_info = discovery_info
        # Set parent class attributes
        self._device = self._usb_info.device
        self._hardware_name = HARDWARE_NAME
        return await self.async_step_confirm()

    async def async_step_import(
        self, fw_discovery_info: HardwareFirmwareDiscoveryInfo
    ) -> ConfigFlowResult:
        """Handle import from ZHA/OTBR firmware notification."""
        assert fw_discovery_info["usb_device"] is not None
        usb_info = usb_service_info_from_device(fw_discovery_info["usb_device"])
        unique_id = usb_unique_id_from_service_info(usb_info)
        if await self.async_set_unique_id(unique_id, raise_on_progress=False):
            self._abort_if_unique_id_configured(updates={DEVICE: usb_info.device})
        self._usb_info = usb_info
        self._device = usb_info.device
        self._hardware_name = HARDWARE_NAME
        # Firmware was already probed by the discovering integration, so the
        # entry can be created directly without re-probing the radio.
        self._probed_firmware_info = fw_discovery_info["firmware_info"]
        return self._async_flow_finished()

    def _async_flow_finished(self) -> ConfigFlowResult:
        """Create the config entry."""
        assert self._usb_info is not None
        assert self._probed_firmware_info is not None

        return self.async_create_entry(
            title=HARDWARE_NAME,
            data={
                VID: self._usb_info.vid,
                PID: self._usb_info.pid,
                SERIAL_NUMBER: self._usb_info.serial_number,
                MANUFACTURER: self._usb_info.manufacturer,
                PRODUCT: self._usb_info.description,
                DEVICE: self._usb_info.device,
                FIRMWARE: self._probed_firmware_info.firmware_type.value,
                FIRMWARE_VERSION: self._probed_firmware_info.firmware_version,
            },
        )
class HomeAssistantConnectZBT2OptionsFlowHandler(
    ZBT2FirmwareMixin, firmware_config_flow.BaseFirmwareOptionsFlow
):
    """Zigbee and Thread options flow handlers."""

    def __init__(self, *args: Any, **kwargs: Any) -> None:
        """Instantiate options flow."""
        super().__init__(*args, **kwargs)

        # Rebuild USB/device state from what the config entry has stored.
        self._usb_info = get_usb_service_info(self._config_entry)
        self._hardware_name = HARDWARE_NAME
        self._device = self._usb_info.device

        # Seed the firmware info from the cached entry data; marked "guess"
        # because the stick has not been probed within this flow yet.
        self._probed_firmware_info = FirmwareInfo(
            device=self._device,
            firmware_type=ApplicationType(self._config_entry.data[FIRMWARE]),
            firmware_version=self._config_entry.data[FIRMWARE_VERSION],
            source="guess",
            owners=[],
        )

        # Regenerate the translation placeholders
        self._get_translation_placeholders()

    def _async_flow_finished(self) -> ConfigFlowResult:
        """Create the config entry."""
        assert self._probed_firmware_info is not None

        # Persist the (possibly changed) firmware type/version on the entry.
        self.hass.config_entries.async_update_entry(
            entry=self.config_entry,
            data={
                **self.config_entry.data,
                FIRMWARE: self._probed_firmware_info.firmware_type.value,
                FIRMWARE_VERSION: self._probed_firmware_info.firmware_version,
            },
            options=self.config_entry.options,
        )

        return self.async_create_entry(title="", data={})
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/homeassistant_connect_zbt2/config_flow.py",
"license": "Apache License 2.0",
"lines": 192,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/homeassistant_connect_zbt2/const.py | """Constants for the Home Assistant Connect ZBT-2 integration."""
# Integration domain.
DOMAIN = "homeassistant_connect_zbt2"

# GitHub API endpoint listing Nabu Casa firmware builds for this stick.
NABU_CASA_FIRMWARE_RELEASES_URL = (
    "https://api.github.com/repos/NabuCasa/silabs-firmware-builder/releases"
)

# Config entry data keys.
FIRMWARE = "firmware"
FIRMWARE_VERSION = "firmware_version"
SERIAL_NUMBER = "serial_number"
MANUFACTURER = "manufacturer"
PRODUCT = "product"
DESCRIPTION = "description"
PID = "pid"
VID = "vid"
DEVICE = "device"

# Human-readable product name shown in the UI.
HARDWARE_NAME = "Home Assistant Connect ZBT-2"
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/homeassistant_connect_zbt2/const.py",
"license": "Apache License 2.0",
"lines": 15,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/homeassistant_connect_zbt2/hardware.py | """The Home Assistant Connect ZBT-2 hardware platform."""
from __future__ import annotations
from homeassistant.components.hardware import HardwareInfo, USBInfo
from homeassistant.core import HomeAssistant, callback
from .config_flow import HomeAssistantConnectZBT2ConfigFlow
from .const import DOMAIN, HARDWARE_NAME, MANUFACTURER, PID, PRODUCT, SERIAL_NUMBER, VID
# Support-site category for this hardware.
# NOTE(review): the URL slug references "ZBT-1" although this integration is
# for the ZBT-2 — confirm this is the intended support category.
DOCUMENTATION_URL = (
    "https://support.nabucasa.com/hc/en-us/categories/"
    "24734620813469-Home-Assistant-Connect-ZBT-1"
)

# (version, minor_version) a fully migrated config entry is expected to have;
# entries that do not match are hidden from the hardware page in async_info.
EXPECTED_ENTRY_VERSION = (
    HomeAssistantConnectZBT2ConfigFlow.VERSION,
    HomeAssistantConnectZBT2ConfigFlow.MINOR_VERSION,
)
@callback
def async_info(hass: HomeAssistant) -> list[HardwareInfo]:
    """Return board info."""
    infos: list[HardwareInfo] = []

    for entry in hass.config_entries.async_entries(DOMAIN):
        # Ignore unmigrated config entries in the hardware page
        if (entry.version, entry.minor_version) != EXPECTED_ENTRY_VERSION:
            continue

        dongle = USBInfo(
            vid=entry.data[VID],
            pid=entry.data[PID],
            serial_number=entry.data[SERIAL_NUMBER],
            manufacturer=entry.data[MANUFACTURER],
            description=entry.data[PRODUCT],
        )
        infos.append(
            HardwareInfo(
                board=None,
                config_entries=[entry.entry_id],
                dongle=dongle,
                name=HARDWARE_NAME,
                url=DOCUMENTATION_URL,
            )
        )

    return infos
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/homeassistant_connect_zbt2/hardware.py",
"license": "Apache License 2.0",
"lines": 36,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/homeassistant_connect_zbt2/update.py | """Home Assistant Connect ZBT-2 firmware update entity."""
from __future__ import annotations
import logging
from homeassistant.components.homeassistant_hardware.coordinator import (
FirmwareUpdateCoordinator,
)
from homeassistant.components.homeassistant_hardware.update import (
BaseFirmwareUpdateEntity,
FirmwareUpdateEntityDescription,
)
from homeassistant.components.homeassistant_hardware.util import (
ApplicationType,
FirmwareInfo,
)
from homeassistant.components.update import UpdateDeviceClass
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import device_registry as dr, entity_registry as er
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import HomeAssistantConnectZBT2ConfigEntry
from .config_flow import ZBT2FirmwareMixin
from .const import DOMAIN, FIRMWARE, FIRMWARE_VERSION, HARDWARE_NAME, SERIAL_NUMBER
_LOGGER = logging.getLogger(__name__)
# Maps the firmware currently on the stick to the update-entity description
# used to display/upgrade it. The `None` key is the fallback for unknown
# firmware types.
FIRMWARE_ENTITY_DESCRIPTIONS: dict[
    ApplicationType | None, FirmwareUpdateEntityDescription
] = {
    ApplicationType.EZSP: FirmwareUpdateEntityDescription(
        key="firmware",
        display_precision=0,
        device_class=UpdateDeviceClass.FIRMWARE,
        entity_category=EntityCategory.CONFIG,
        # EZSP version strings carry extra info after the first space.
        version_parser=lambda fw: fw.split(" ", 1)[0],
        fw_type="zbt2_zigbee_ncp",
        version_key="ezsp_version",
        expected_firmware_type=ApplicationType.EZSP,
        firmware_name="EmberZNet Zigbee",
    ),
    ApplicationType.SPINEL: FirmwareUpdateEntityDescription(
        key="firmware",
        display_precision=0,
        device_class=UpdateDeviceClass.FIRMWARE,
        entity_category=EntityCategory.CONFIG,
        # Extract the version between the first "/" and the next "_".
        version_parser=lambda fw: fw.split("/", 1)[1].split("_", 1)[0],
        fw_type="zbt2_openthread_rcp",
        version_key="ot_rcp_version",
        expected_firmware_type=ApplicationType.SPINEL,
        firmware_name="OpenThread RCP",
    ),
    ApplicationType.GECKO_BOOTLOADER: FirmwareUpdateEntityDescription(
        key="firmware",
        display_precision=0,
        device_class=UpdateDeviceClass.FIRMWARE,
        entity_category=EntityCategory.CONFIG,
        version_parser=lambda fw: fw,
        fw_type=None,  # We don't want to update the bootloader
        version_key="gecko_bootloader_version",
        expected_firmware_type=ApplicationType.GECKO_BOOTLOADER,
        firmware_name="Gecko Bootloader",
    ),
    # Fallback when the cached firmware type is unknown or unparsable.
    None: FirmwareUpdateEntityDescription(
        key="firmware",
        display_precision=0,
        device_class=UpdateDeviceClass.FIRMWARE,
        entity_category=EntityCategory.CONFIG,
        version_parser=lambda fw: fw,
        fw_type=None,
        version_key=None,
        expected_firmware_type=None,
        firmware_name=None,
    ),
}
def _async_create_update_entity(
    hass: HomeAssistant,
    config_entry: HomeAssistantConnectZBT2ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> FirmwareUpdateEntity:
    """Create an update entity that handles firmware type changes.

    Picks the entity description matching the firmware type cached on the
    config entry and registers a callback that replaces the entity whenever
    the firmware type changes.
    """
    firmware_type = config_entry.data[FIRMWARE]

    try:
        entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[
            ApplicationType(firmware_type)
        ]
    # Fix: `except KeyError, ValueError:` is Python 2 syntax and a
    # SyntaxError in Python 3 — multiple exception types need a tuple.
    # ApplicationType(...) raises ValueError for unknown strings; the dict
    # lookup raises KeyError.
    except (KeyError, ValueError):
        _LOGGER.debug(
            "Unknown firmware type %r, using default entity description", firmware_type
        )
        entity_description = FIRMWARE_ENTITY_DESCRIPTIONS[None]

    entity = FirmwareUpdateEntity(
        device=config_entry.data["device"],
        config_entry=config_entry,
        update_coordinator=config_entry.runtime_data.coordinator,
        entity_description=entity_description,
    )

    def firmware_type_changed(
        old_type: ApplicationType | None, new_type: ApplicationType | None
    ) -> None:
        """Replace the current entity when the firmware type changes."""
        er.async_get(hass).async_remove(entity.entity_id)
        async_add_entities(
            [_async_create_update_entity(hass, config_entry, async_add_entities)]
        )

    entity.async_on_remove(
        entity.add_firmware_type_changed_callback(firmware_type_changed)
    )

    return entity
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: HomeAssistantConnectZBT2ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the firmware update config entry."""
    async_add_entities(
        [_async_create_update_entity(hass, config_entry, async_add_entities)]
    )
class FirmwareUpdateEntity(BaseFirmwareUpdateEntity):
    """Connect ZBT-2 firmware update entity."""

    # Reuse the reset/probe configuration from the config-flow mixin so the
    # update entity talks to the stick the same way the flow does.
    BOOTLOADER_RESET_METHODS = ZBT2FirmwareMixin.BOOTLOADER_RESET_METHODS
    APPLICATION_PROBE_METHODS = ZBT2FirmwareMixin.APPLICATION_PROBE_METHODS

    def __init__(
        self,
        device: str,
        config_entry: HomeAssistantConnectZBT2ConfigEntry,
        update_coordinator: FirmwareUpdateCoordinator,
        entity_description: FirmwareUpdateEntityDescription,
    ) -> None:
        """Initialize the Connect ZBT-2 firmware update entity."""
        super().__init__(device, config_entry, update_coordinator, entity_description)

        # The serial number uniquely identifies the stick across reconnects.
        serial_number = self._config_entry.data[SERIAL_NUMBER]
        self._attr_unique_id = f"{serial_number}_{self.entity_description.key}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, serial_number)},
            name=f"{HARDWARE_NAME} ({serial_number})",
            model=HARDWARE_NAME,
            manufacturer="Nabu Casa",
            serial_number=serial_number,
        )

        # Use the cached firmware info if it exists
        if self._config_entry.data[FIRMWARE] is not None:
            self._current_firmware_info = FirmwareInfo(
                device=device,
                firmware_type=ApplicationType(self._config_entry.data[FIRMWARE]),
                firmware_version=self._config_entry.data[FIRMWARE_VERSION],
                owners=[],
                source="homeassistant_connect_zbt2",
            )

    def _update_attributes(self) -> None:
        """Recompute the attributes of the entity."""
        super()._update_attributes()

        # Keep the device registry's software version in sync with the entity.
        assert self.device_entry is not None
        device_registry = dr.async_get(self.hass)
        device_registry.async_update_device(
            device_id=self.device_entry.id,
            sw_version=f"{self.entity_description.firmware_name} {self._attr_installed_version}",
        )

    @callback
    def _firmware_info_callback(self, firmware_info: FirmwareInfo) -> None:
        """Handle updated firmware info being pushed by an integration."""
        # Cache the new firmware type/version on the config entry.
        # NOTE(review): the config flow stores `firmware_type.value` while this
        # stores the enum member itself — confirm both serialize identically.
        self.hass.config_entries.async_update_entry(
            self._config_entry,
            data={
                **self._config_entry.data,
                FIRMWARE: firmware_info.firmware_type,
                FIRMWARE_VERSION: firmware_info.firmware_version,
            },
        )

        super()._firmware_info_callback(firmware_info)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/homeassistant_connect_zbt2/update.py",
"license": "Apache License 2.0",
"lines": 166,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/homeassistant_connect_zbt2/util.py | """Utility functions for Home Assistant Connect ZBT-2 integration."""
from __future__ import annotations
import logging
from homeassistant.config_entries import ConfigEntry
from homeassistant.helpers.service_info.usb import UsbServiceInfo
_LOGGER = logging.getLogger(__name__)
def get_usb_service_info(config_entry: ConfigEntry) -> UsbServiceInfo:
    """Build a UsbServiceInfo from the data stored on the config entry."""
    data = config_entry.data
    return UsbServiceInfo(
        device=data["device"],
        vid=data["vid"],
        pid=data["pid"],
        serial_number=data["serial_number"],
        manufacturer=data["manufacturer"],
        description=data["product"],
    )
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/homeassistant_connect_zbt2/util.py",
"license": "Apache License 2.0",
"lines": 16,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/imeon_inverter/select.py | """Imeon inverter select support."""
from collections.abc import Awaitable, Callable
from dataclasses import dataclass
import logging
from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import ATTR_INVERTER_MODE, INVERTER_MODE_OPTIONS
from .coordinator import Inverter, InverterCoordinator
from .entity import InverterEntity
type InverterConfigEntry = ConfigEntry[InverterCoordinator]
_LOGGER = logging.getLogger(__name__)
@dataclass(frozen=True, kw_only=True)
class ImeonSelectEntityDescription(SelectEntityDescription):
    """Class to describe an Imeon inverter select entity."""

    # Coroutine that writes the chosen option to the inverter API.
    set_value_fn: Callable[[Inverter, str], Awaitable[bool]]
    # Maps raw device values to the option strings shown in the UI.
    values: dict[str, str]
# All select entities created for an inverter config entry.
SELECT_DESCRIPTIONS: tuple[ImeonSelectEntityDescription, ...] = (
    ImeonSelectEntityDescription(
        key="manager_inverter_mode",
        translation_key="manager_inverter_mode",
        options=list(INVERTER_MODE_OPTIONS),
        values=ATTR_INVERTER_MODE,
        # Translate the UI option back to the device-side value before writing.
        set_value_fn=lambda api, mode: api.set_inverter_mode(
            INVERTER_MODE_OPTIONS[mode]
        ),
    ),
)
async def async_setup_entry(
    hass: HomeAssistant,
    entry: InverterConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Create each select for a given config entry."""
    coordinator = entry.runtime_data
    entities = [
        InverterSelect(coordinator, entry, description)
        for description in SELECT_DESCRIPTIONS
    ]
    async_add_entities(entities)
class InverterSelect(InverterEntity, SelectEntity):
    """Select entity exposing a configurable Imeon inverter setting."""

    entity_description: ImeonSelectEntityDescription
    _attr_entity_category = EntityCategory.CONFIG

    @property
    def current_option(self) -> str | None:
        """Return the currently selected option, or None when unknown."""
        raw = self.coordinator.data.get(self.data_key)
        if isinstance(raw, str):
            # Map the device-side value back to the displayed option.
            return self.entity_description.values.get(raw)
        return None

    async def async_select_option(self, option: str) -> None:
        """Write the chosen option to the inverter."""
        set_value = self.entity_description.set_value_fn
        await set_value(self.coordinator.api, option)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/imeon_inverter/select.py",
"license": "Apache License 2.0",
"lines": 55,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/irm_kmi/config_flow.py | """Config flow to set up IRM KMI integration via the UI."""
import logging
from irm_kmi_api import IrmKmiApiClient, IrmKmiApiError
import voluptuous as vol
from homeassistant.config_entries import (
ConfigFlow,
ConfigFlowResult,
OptionsFlow,
OptionsFlowWithReload,
)
from homeassistant.const import (
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_LOCATION,
CONF_UNIQUE_ID,
)
from homeassistant.core import callback
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import (
LocationSelector,
SelectSelector,
SelectSelectorConfig,
SelectSelectorMode,
)
from .const import (
CONF_LANGUAGE_OVERRIDE,
CONF_LANGUAGE_OVERRIDE_OPTIONS,
DOMAIN,
OUT_OF_BENELUX,
USER_AGENT,
)
from .coordinator import IrmKmiConfigEntry
_LOGGER = logging.getLogger(__name__)
class IrmKmiConfigFlow(ConfigFlow, domain=DOMAIN):
    """Configuration flow for the IRM KMI integration."""

    VERSION = 1

    @staticmethod
    @callback
    def async_get_options_flow(_config_entry: IrmKmiConfigEntry) -> OptionsFlow:
        """Create the options flow."""
        return IrmKmiOptionFlow()

    async def async_step_user(self, user_input: dict | None = None) -> ConfigFlowResult:
        """Define the user step of the configuration flow.

        Asks for a location, validates it against the IRM KMI API, and either
        creates the entry, shows an error (location outside Benelux), or
        aborts (API failure, already configured).
        """
        errors: dict = {}

        # Default the location picker to the Home Assistant home coordinates.
        default_location = {
            ATTR_LATITUDE: self.hass.config.latitude,
            ATTR_LONGITUDE: self.hass.config.longitude,
        }

        if user_input:
            _LOGGER.debug("Provided config user is: %s", user_input)

            lat: float = user_input[CONF_LOCATION][ATTR_LATITUDE]
            lon: float = user_input[CONF_LOCATION][ATTR_LONGITUDE]

            try:
                # Probe the API once to validate the chosen coordinates.
                api_data = await IrmKmiApiClient(
                    session=async_get_clientsession(self.hass),
                    user_agent=USER_AGENT,
                ).get_forecasts_coord({"lat": lat, "long": lon})
            except IrmKmiApiError:
                _LOGGER.exception(
                    "Encountered an unexpected error while configuring the integration"
                )
                return self.async_abort(reason="api_error")

            # The API answers with a sentinel city name for unsupported areas.
            if api_data["cityName"] in OUT_OF_BENELUX:
                errors[CONF_LOCATION] = "out_of_benelux"

            if not errors:
                name: str = api_data["cityName"]
                country: str = api_data["country"]
                unique_id: str = f"{name.lower()} {country.lower()}"
                await self.async_set_unique_id(unique_id)
                self._abort_if_unique_id_configured()

                user_input[CONF_UNIQUE_ID] = unique_id
                return self.async_create_entry(title=name, data=user_input)

            # Re-show the form pre-filled with the rejected location.
            default_location = user_input[CONF_LOCATION]

        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    vol.Required(
                        CONF_LOCATION, default=default_location
                    ): LocationSelector()
                }
            ),
            errors=errors,
        )
class IrmKmiOptionFlow(OptionsFlowWithReload):
    """Option flow for the IRM KMI integration, help change the options once the integration was configured."""

    async def async_step_init(self, user_input: dict | None = None) -> ConfigFlowResult:
        """Manage the options."""
        if user_input is not None:
            _LOGGER.debug("Provided config user is: %s", user_input)
            return self.async_create_entry(data=user_input)

        # Show a dropdown pre-selected with the current override ("none" when
        # no override was stored yet).
        return self.async_show_form(
            step_id="init",
            data_schema=vol.Schema(
                {
                    vol.Optional(
                        CONF_LANGUAGE_OVERRIDE,
                        default=self.config_entry.options.get(
                            CONF_LANGUAGE_OVERRIDE, "none"
                        ),
                    ): SelectSelector(
                        SelectSelectorConfig(
                            options=CONF_LANGUAGE_OVERRIDE_OPTIONS,
                            mode=SelectSelectorMode.DROPDOWN,
                            translation_key=CONF_LANGUAGE_OVERRIDE,
                        )
                    )
                }
            ),
        )
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/irm_kmi/config_flow.py",
"license": "Apache License 2.0",
"lines": 110,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/irm_kmi/const.py | """Constants for the IRM KMI integration."""
from typing import Final
from homeassistant.components.weather import (
ATTR_CONDITION_CLEAR_NIGHT,
ATTR_CONDITION_CLOUDY,
ATTR_CONDITION_FOG,
ATTR_CONDITION_LIGHTNING_RAINY,
ATTR_CONDITION_PARTLYCLOUDY,
ATTR_CONDITION_POURING,
ATTR_CONDITION_RAINY,
ATTR_CONDITION_SNOWY,
ATTR_CONDITION_SNOWY_RAINY,
ATTR_CONDITION_SUNNY,
)
from homeassistant.const import Platform, __version__
DOMAIN: Final = "irm_kmi"
PLATFORMS: Final = [Platform.WEATHER]

# Sentinel "city names" the API returns for coordinates outside Benelux,
# one per API language.
OUT_OF_BENELUX: Final = [
    "außerhalb der Benelux (Brussels)",
    "Hors de Belgique (Bxl)",
    "Outside the Benelux (Brussels)",
    "Buiten de Benelux (Brussel)",
]

# Languages supported by the IRM KMI API.
LANGS: Final = ["en", "fr", "nl", "de"]

# Options-flow key and its allowed values ("none" means: follow HA language).
CONF_LANGUAGE_OVERRIDE: Final = "language_override"
CONF_LANGUAGE_OVERRIDE_OPTIONS: Final = ["none", "fr", "nl", "de", "en"]

# Dict to map ('ww', 'dayNight') tuple from IRM KMI to HA conditions.
IRM_KMI_TO_HA_CONDITION_MAP: Final = {
    (0, "d"): ATTR_CONDITION_SUNNY,
    (0, "n"): ATTR_CONDITION_CLEAR_NIGHT,
    (1, "d"): ATTR_CONDITION_SUNNY,
    (1, "n"): ATTR_CONDITION_CLEAR_NIGHT,
    (2, "d"): ATTR_CONDITION_LIGHTNING_RAINY,
    (2, "n"): ATTR_CONDITION_LIGHTNING_RAINY,
    (3, "d"): ATTR_CONDITION_PARTLYCLOUDY,
    (3, "n"): ATTR_CONDITION_PARTLYCLOUDY,
    (4, "d"): ATTR_CONDITION_POURING,
    (4, "n"): ATTR_CONDITION_POURING,
    (5, "d"): ATTR_CONDITION_LIGHTNING_RAINY,
    (5, "n"): ATTR_CONDITION_LIGHTNING_RAINY,
    (6, "d"): ATTR_CONDITION_POURING,
    (6, "n"): ATTR_CONDITION_POURING,
    (7, "d"): ATTR_CONDITION_LIGHTNING_RAINY,
    (7, "n"): ATTR_CONDITION_LIGHTNING_RAINY,
    (8, "d"): ATTR_CONDITION_SNOWY_RAINY,
    (8, "n"): ATTR_CONDITION_SNOWY_RAINY,
    (9, "d"): ATTR_CONDITION_SNOWY_RAINY,
    (9, "n"): ATTR_CONDITION_SNOWY_RAINY,
    (10, "d"): ATTR_CONDITION_LIGHTNING_RAINY,
    (10, "n"): ATTR_CONDITION_LIGHTNING_RAINY,
    (11, "d"): ATTR_CONDITION_SNOWY,
    (11, "n"): ATTR_CONDITION_SNOWY,
    (12, "d"): ATTR_CONDITION_SNOWY,
    (12, "n"): ATTR_CONDITION_SNOWY,
    (13, "d"): ATTR_CONDITION_LIGHTNING_RAINY,
    (13, "n"): ATTR_CONDITION_LIGHTNING_RAINY,
    (14, "d"): ATTR_CONDITION_CLOUDY,
    (14, "n"): ATTR_CONDITION_CLOUDY,
    (15, "d"): ATTR_CONDITION_CLOUDY,
    (15, "n"): ATTR_CONDITION_CLOUDY,
    (16, "d"): ATTR_CONDITION_POURING,
    (16, "n"): ATTR_CONDITION_POURING,
    (17, "d"): ATTR_CONDITION_LIGHTNING_RAINY,
    (17, "n"): ATTR_CONDITION_LIGHTNING_RAINY,
    (18, "d"): ATTR_CONDITION_RAINY,
    (18, "n"): ATTR_CONDITION_RAINY,
    (19, "d"): ATTR_CONDITION_POURING,
    (19, "n"): ATTR_CONDITION_POURING,
    (20, "d"): ATTR_CONDITION_SNOWY_RAINY,
    (20, "n"): ATTR_CONDITION_SNOWY_RAINY,
    (21, "d"): ATTR_CONDITION_RAINY,
    (21, "n"): ATTR_CONDITION_RAINY,
    (22, "d"): ATTR_CONDITION_SNOWY,
    (22, "n"): ATTR_CONDITION_SNOWY,
    (23, "d"): ATTR_CONDITION_SNOWY,
    (23, "n"): ATTR_CONDITION_SNOWY,
    (24, "d"): ATTR_CONDITION_FOG,
    (24, "n"): ATTR_CONDITION_FOG,
    (25, "d"): ATTR_CONDITION_FOG,
    (25, "n"): ATTR_CONDITION_FOG,
    (26, "d"): ATTR_CONDITION_FOG,
    (26, "n"): ATTR_CONDITION_FOG,
    (27, "d"): ATTR_CONDITION_FOG,
    (27, "n"): ATTR_CONDITION_FOG,
}

# Localized institute names, keyed by language code.
IRM_KMI_NAME: Final = {
    "fr": "Institut Royal Météorologique de Belgique",
    "nl": "Koninklijk Meteorologisch Instituut van België",
    "de": "Königliche Meteorologische Institut von Belgien",
    "en": "Royal Meteorological Institute of Belgium",
}

# User agent sent with every API request.
USER_AGENT: Final = (
    f"https://www.home-assistant.io/integrations/irm_kmi (version {__version__})"
)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/irm_kmi/const.py",
"license": "Apache License 2.0",
"lines": 94,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/irm_kmi/coordinator.py | """DataUpdateCoordinator for the IRM KMI integration."""
from datetime import timedelta
import logging
from irm_kmi_api import IrmKmiApiClientHa, IrmKmiApiError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_LATITUDE, ATTR_LONGITUDE, CONF_LOCATION
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import (
TimestampDataUpdateCoordinator,
UpdateFailed,
)
from homeassistant.util import dt as dt_util
from homeassistant.util.dt import utcnow
from .data import ProcessedCoordinatorData
from .utils import preferred_language
_LOGGER = logging.getLogger(__name__)
type IrmKmiConfigEntry = ConfigEntry[IrmKmiCoordinator]
class IrmKmiCoordinator(TimestampDataUpdateCoordinator[ProcessedCoordinatorData]):
    """Coordinator to update data from IRM KMI."""

    def __init__(
        self,
        hass: HomeAssistant,
        entry: IrmKmiConfigEntry,
        api_client: IrmKmiApiClientHa,
    ) -> None:
        """Initialize the coordinator."""
        super().__init__(
            hass,
            _LOGGER,
            config_entry=entry,
            name="IRM KMI weather",
            update_interval=timedelta(minutes=7),
        )
        self._api = api_client
        self._location = entry.data[CONF_LOCATION]

    async def _async_update_data(self) -> ProcessedCoordinatorData:
        """Fetch data from API endpoint.

        This is the place to pre-process the data to lookup tables so entities can quickly look up their data.

        :return: ProcessedCoordinatorData
        """
        self._api.expire_cache()
        try:
            await self._api.refresh_forecasts_coord(
                {
                    "lat": self._location[ATTR_LATITUDE],
                    "long": self._location[ATTR_LONGITUDE],
                }
            )
        except IrmKmiApiError as err:
            # Tolerate transient API failures: keep serving cached data for up
            # to 2.5 update intervals after the last successful refresh.
            # Fix: the elapsed time must be `utcnow() - last_update_success_time`;
            # the reversed subtraction is always negative, which made this guard
            # unconditionally true so UpdateFailed could never be raised.
            if (
                self.last_update_success_time is not None
                and self.update_interval is not None
                and utcnow() - self.last_update_success_time
                < timedelta(seconds=2.5 * self.update_interval.seconds)
            ):
                return self.data
            _LOGGER.warning(
                "Could not connect to the API since %s", self.last_update_success_time
            )
            raise UpdateFailed(
                f"Error communicating with API for general forecast: {err}. "
                f"Last success time is: {self.last_update_success_time}"
            ) from err

        if not self.last_update_success:
            _LOGGER.warning("Successfully reconnected to the API")

        return await self.process_api_data()

    async def process_api_data(self) -> ProcessedCoordinatorData:
        """From the API data, create the object that will be used in the entities."""
        tz = await dt_util.async_get_time_zone("Europe/Brussels")
        lang = preferred_language(self.hass, self.config_entry)
        return ProcessedCoordinatorData(
            current_weather=self._api.get_current_weather(tz),
            daily_forecast=self._api.get_daily_forecast(tz, lang),
            hourly_forecast=self._api.get_hourly_forecast(tz),
            country=self._api.get_country(),
        )
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/irm_kmi/coordinator.py",
"license": "Apache License 2.0",
"lines": 76,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/irm_kmi/data.py | """Define data classes for the IRM KMI integration."""
from dataclasses import dataclass, field
from irm_kmi_api import CurrentWeatherData, ExtendedForecast
from homeassistant.components.weather import Forecast
@dataclass
class ProcessedCoordinatorData:
    """Dataclass that will be exposed to the entities consuming data from an IrmKmiCoordinator."""

    # Snapshot of the current conditions.
    current_weather: CurrentWeatherData
    # Country reported by the API for the configured location.
    country: str
    # Hourly and daily forecasts; default to empty lists when unavailable.
    hourly_forecast: list[Forecast] = field(default_factory=list)
    daily_forecast: list[ExtendedForecast] = field(default_factory=list)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/irm_kmi/data.py",
"license": "Apache License 2.0",
"lines": 11,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/irm_kmi/entity.py | """Base class shared among IRM KMI entities."""
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN, IRM_KMI_NAME
from .coordinator import IrmKmiConfigEntry, IrmKmiCoordinator
from .utils import preferred_language
class IrmKmiBaseEntity(CoordinatorEntity[IrmKmiCoordinator]):
    """Base methods for IRM KMI entities."""

    _attr_attribution = (
        "Weather data from the Royal Meteorological Institute of Belgium meteo.be"
    )
    _attr_has_entity_name = True

    def __init__(self, entry: IrmKmiConfigEntry) -> None:
        """Init base properties for IRM KMI entities."""
        coordinator = entry.runtime_data
        super().__init__(coordinator)
        # NOTE(review): `self.hass` is read here, before the entity is added to
        # Home Assistant — confirm it is already set at this point
        # (preferred_language only dereferences it when no language override
        # is configured).
        self._attr_device_info = DeviceInfo(
            entry_type=DeviceEntryType.SERVICE,
            identifiers={(DOMAIN, entry.entry_id)},
            manufacturer=IRM_KMI_NAME.get(preferred_language(self.hass, entry)),
        )
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/irm_kmi/entity.py",
"license": "Apache License 2.0",
"lines": 21,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/irm_kmi/utils.py | """Helper functions for use with IRM KMI integration."""
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from .const import CONF_LANGUAGE_OVERRIDE, LANGS
def preferred_language(hass: HomeAssistant, config_entry: ConfigEntry | None) -> str:
    """Return the language to use, honoring a configured override.

    Falls back to the Home Assistant language (or "en" when that language is
    not supported) when there is no config entry or the override is "none".
    """
    if config_entry is not None:
        override = config_entry.options.get(CONF_LANGUAGE_OVERRIDE)
        if override != "none":
            # A missing override key resolves to "en" here, matching the
            # original `options.get(CONF_LANGUAGE_OVERRIDE, "en")` fallback.
            return "en" if override is None else override
    ha_language = hass.config.language
    return ha_language if ha_language in LANGS else "en"
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/irm_kmi/utils.py",
"license": "Apache License 2.0",
"lines": 12,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/irm_kmi/weather.py | """Support for IRM KMI weather."""
from irm_kmi_api import CurrentWeatherData
from homeassistant.components.weather import (
Forecast,
SingleCoordinatorWeatherEntity,
WeatherEntityFeature,
)
from homeassistant.const import (
CONF_UNIQUE_ID,
UnitOfPrecipitationDepth,
UnitOfPressure,
UnitOfSpeed,
UnitOfTemperature,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import IrmKmiConfigEntry, IrmKmiCoordinator
from .entity import IrmKmiBaseEntity
async def async_setup_entry(
    _hass: HomeAssistant,
    entry: IrmKmiConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the weather entry."""
    weather = IrmKmiWeather(entry)
    async_add_entities([weather])


# Coordinator is used to centralize the data updates
PARALLEL_UPDATES = 0
class IrmKmiWeather(
    IrmKmiBaseEntity,  # WeatherEntity
    SingleCoordinatorWeatherEntity[IrmKmiCoordinator],
):
    """Weather entity for IRM KMI weather."""

    _attr_name = None
    _attr_supported_features = (
        WeatherEntityFeature.FORECAST_DAILY
        | WeatherEntityFeature.FORECAST_TWICE_DAILY
        | WeatherEntityFeature.FORECAST_HOURLY
    )
    _attr_native_temperature_unit = UnitOfTemperature.CELSIUS
    _attr_native_wind_speed_unit = UnitOfSpeed.KILOMETERS_PER_HOUR
    _attr_native_precipitation_unit = UnitOfPrecipitationDepth.MILLIMETERS
    _attr_native_pressure_unit = UnitOfPressure.HPA

    def __init__(self, entry: IrmKmiConfigEntry) -> None:
        """Create a new instance of the weather entity from a configuration entry."""
        IrmKmiBaseEntity.__init__(self, entry)
        SingleCoordinatorWeatherEntity.__init__(self, entry.runtime_data)
        self._attr_unique_id = entry.data[CONF_UNIQUE_ID]

    @property
    def available(self) -> bool:
        """Return True if entity is available."""
        # Explicit pass-through of the parent availability (no extra logic).
        return super().available

    @property
    def current_weather(self) -> CurrentWeatherData:
        """Return the current weather."""
        return self.coordinator.data.current_weather

    @property
    def condition(self) -> str | None:
        """Return the current condition."""
        return self.current_weather.get("condition")

    @property
    def native_temperature(self) -> float | None:
        """Return the temperature in native units."""
        return self.current_weather.get("temperature")

    @property
    def native_wind_speed(self) -> float | None:
        """Return the wind speed in native units."""
        return self.current_weather.get("wind_speed")

    @property
    def native_wind_gust_speed(self) -> float | None:
        """Return the wind gust speed in native units."""
        return self.current_weather.get("wind_gust_speed")

    @property
    def wind_bearing(self) -> float | str | None:
        """Return the wind bearing."""
        return self.current_weather.get("wind_bearing")

    @property
    def native_pressure(self) -> float | None:
        """Return the pressure in native units."""
        return self.current_weather.get("pressure")

    @property
    def uv_index(self) -> float | None:
        """Return the UV index."""
        return self.current_weather.get("uv_index")

    def _async_forecast_twice_daily(self) -> list[Forecast] | None:
        """Return the twice-daily forecast in native units.

        The coordinator's daily forecast already contains separate day and
        night entries, which is exactly the twice-daily shape.
        """
        return self.coordinator.data.daily_forecast

    def _async_forecast_daily(self) -> list[Forecast] | None:
        """Return the daily forecast in native units."""
        return self.daily_forecast()

    def _async_forecast_hourly(self) -> list[Forecast] | None:
        """Return the hourly forecast in native units."""
        return self.coordinator.data.hourly_forecast

    def daily_forecast(self) -> list[Forecast] | None:
        """Return the daily forecast in native units."""
        data: list[Forecast] = self.coordinator.data.daily_forecast

        # The data in daily_forecast might contain nighttime forecast.
        # The following handle the lowest temperature attribute to be displayed correctly.
        # Case 1: the list starts with a night entry — copy its low onto the
        # following day entry, swapping low/high if they end up inverted.
        if (
            len(data) > 1
            and not data[0].get("is_daytime")
            and data[1].get("native_templow") is None
        ):
            data[1]["native_templow"] = data[0].get("native_templow")
            if (
                data[1]["native_templow"] is not None
                and data[1]["native_temperature"] is not None
                and data[1]["native_templow"] > data[1]["native_temperature"]
            ):
                (data[1]["native_templow"], data[1]["native_temperature"]) = (
                    data[1]["native_temperature"],
                    data[1]["native_templow"],
                )

        if len(data) > 0 and not data[0].get("is_daytime"):
            return data

        # Case 2: the list starts with a day entry missing its low — borrow
        # the low from the following night entry, with the same swap guard.
        if (
            len(data) > 1
            and data[0].get("native_templow") is None
            and not data[1].get("is_daytime")
        ):
            data[0]["native_templow"] = data[1].get("native_templow")
            if (
                data[0]["native_templow"] is not None
                and data[0]["native_temperature"] is not None
                and data[0]["native_templow"] > data[0]["native_temperature"]
            ):
                (data[0]["native_templow"], data[0]["native_temperature"]) = (
                    data[0]["native_temperature"],
                    data[0]["native_templow"],
                )

        # Daily view keeps only the daytime entries.
        return [f for f in data if f.get("is_daytime")]
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/irm_kmi/weather.py",
"license": "Apache License 2.0",
"lines": 130,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/lawn_mower/intent.py | """Intents for the lawn mower integration."""
from homeassistant.core import HomeAssistant
from homeassistant.helpers import intent
from . import DOMAIN, SERVICE_DOCK, SERVICE_START_MOWING, LawnMowerEntityFeature
# NOTE(review): "LANW" looks like a typo for "LAWN", but the names are part of
# the module's public surface, so they are kept unchanged here.
INTENT_LANW_MOWER_START_MOWING = "HassLawnMowerStartMowing"
INTENT_LANW_MOWER_DOCK = "HassLawnMowerDock"


async def async_setup_intents(hass: HomeAssistant) -> None:
    """Register the lawn mower intent handlers."""
    # (intent type, service, description, required feature) per handler.
    handler_specs = (
        (
            INTENT_LANW_MOWER_START_MOWING,
            SERVICE_START_MOWING,
            "Starts a lawn mower",
            LawnMowerEntityFeature.START_MOWING,
        ),
        (
            INTENT_LANW_MOWER_DOCK,
            SERVICE_DOCK,
            "Sends a lawn mower to dock",
            LawnMowerEntityFeature.DOCK,
        ),
    )
    for intent_type, service, description, feature in handler_specs:
        intent.async_register(
            hass,
            intent.ServiceIntentHandler(
                intent_type,
                DOMAIN,
                service,
                description=description,
                required_domains={DOMAIN},
                platforms={DOMAIN},
                required_features=feature,
            ),
        )
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/lawn_mower/intent.py",
"license": "Apache License 2.0",
"lines": 32,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/letpot/number.py | """Support for LetPot number entities."""
from collections.abc import Callable, Coroutine
from dataclasses import dataclass
from typing import Any
from letpot.deviceclient import LetPotDeviceClient
from letpot.models import DeviceFeature
from homeassistant.components.number import (
NumberEntity,
NumberEntityDescription,
NumberMode,
)
from homeassistant.const import PRECISION_WHOLE, EntityCategory, UnitOfTime
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .coordinator import LetPotConfigEntry, LetPotDeviceCoordinator
from .entity import LetPotEntity, LetPotEntityDescription, exception_handler
# Each change pushes a 'full' device status with the change. The library will cache
# pending changes to avoid overwriting, but try to avoid a lot of parallelism.
PARALLEL_UPDATES = 1
@dataclass(frozen=True, kw_only=True)
class LetPotNumberEntityDescription(LetPotEntityDescription, NumberEntityDescription):
    """Describes a LetPot number entity."""

    # Computes the maximum selectable value (may depend on device capabilities).
    max_value_fn: Callable[[LetPotDeviceCoordinator], float]
    # Reads the current value from coordinator data; None when unknown.
    value_fn: Callable[[LetPotDeviceCoordinator], float | None]
    # Coroutine pushing a new value to the device: (client, serial, value).
    set_value_fn: Callable[[LetPotDeviceClient, str, float], Coroutine[Any, Any, None]]
# Descriptions of every number entity this platform can create.
NUMBERS: tuple[LetPotNumberEntityDescription, ...] = (
    LetPotNumberEntityDescription(
        key="light_brightness_levels",
        translation_key="light_brightness",
        # Present the device's discrete brightness level as a 1-based index.
        value_fn=(
            lambda coordinator: (
                coordinator.device_client.get_light_brightness_levels(
                    coordinator.device.serial_number
                ).index(coordinator.data.light_brightness)
                + 1
                if coordinator.data.light_brightness is not None
                else None
            )
        ),
        # Map the 1-based slider index back to the device's raw level value.
        set_value_fn=(
            lambda device_client, serial, value: device_client.set_light_brightness(
                serial,
                device_client.get_light_brightness_levels(serial)[int(value) - 1],
            )
        ),
        # Only create the entity for devices reporting brightness-level support.
        supported_fn=(
            lambda coordinator: (
                DeviceFeature.LIGHT_BRIGHTNESS_LEVELS
                in coordinator.device_client.device_info(
                    coordinator.device.serial_number
                ).features
            )
        ),
        native_min_value=float(1),
        # Maximum equals the number of discrete levels the device exposes.
        max_value_fn=lambda coordinator: float(
            len(
                coordinator.device_client.get_light_brightness_levels(
                    coordinator.device.serial_number
                )
            )
        ),
        native_step=PRECISION_WHOLE,
        mode=NumberMode.SLIDER,
        entity_category=EntityCategory.CONFIG,
    ),
    LetPotNumberEntityDescription(
        key="plant_days",
        translation_key="plant_days",
        native_unit_of_measurement=UnitOfTime.DAYS,
        value_fn=lambda coordinator: coordinator.data.plant_days,
        set_value_fn=(
            lambda device_client, serial, value: device_client.set_plant_days(
                serial, int(value)
            )
        ),
        native_min_value=float(0),
        max_value_fn=lambda _: float(999),
        native_step=PRECISION_WHOLE,
        mode=NumberMode.BOX,
    ),
)
async def async_setup_entry(
    hass: HomeAssistant,
    entry: LetPotConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up LetPot number entities based on a config entry and device status/features."""
    entities = [
        LetPotNumberEntity(device_coordinator, entity_description)
        for entity_description in NUMBERS
        for device_coordinator in entry.runtime_data
        if entity_description.supported_fn(device_coordinator)
    ]
    async_add_entities(entities)
class LetPotNumberEntity(LetPotEntity, NumberEntity):
    """A number entity backed by a LetPot device coordinator."""

    entity_description: LetPotNumberEntityDescription

    def __init__(
        self,
        coordinator: LetPotDeviceCoordinator,
        description: LetPotNumberEntityDescription,
    ) -> None:
        """Initialize LetPot number entity."""
        super().__init__(coordinator)
        self.entity_description = description
        self._attr_unique_id = f"{coordinator.config_entry.unique_id}_{coordinator.device.serial_number}_{description.key}"

    @property
    def native_max_value(self) -> float:
        """Return the maximum available value."""
        # The ceiling can depend on device capabilities, so it is computed.
        return self.entity_description.max_value_fn(self.coordinator)

    @property
    def native_value(self) -> float | None:
        """Return the number value."""
        current = self.entity_description.value_fn(self.coordinator)
        return current

    @exception_handler
    async def async_set_native_value(self, value: float) -> None:
        """Change the number value."""
        coordinator = self.coordinator
        return await self.entity_description.set_value_fn(
            coordinator.device_client,
            coordinator.device.serial_number,
            value,
        )
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/letpot/number.py",
"license": "Apache License 2.0",
"lines": 122,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/libre_hardware_monitor/config_flow.py | """Config flow for LibreHardwareMonitor."""
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
from librehardwaremonitor_api import (
LibreHardwareMonitorClient,
LibreHardwareMonitorConnectionError,
LibreHardwareMonitorNoDevicesError,
LibreHardwareMonitorUnauthorizedError,
)
import voluptuous as vol
from homeassistant.config_entries import (
SOURCE_REAUTH,
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
)
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME
from .const import DEFAULT_HOST, DEFAULT_PORT, DOMAIN
_LOGGER = logging.getLogger(__name__)
CONFIG_SCHEMA = vol.Schema(
{
vol.Required(CONF_HOST, default=DEFAULT_HOST): str,
vol.Required(CONF_PORT, default=DEFAULT_PORT): int,
}
)
REAUTH_SCHEMA = vol.Schema(
{
vol.Required(CONF_USERNAME): str,
vol.Required(CONF_PASSWORD): str,
}
)
async def _validate_connection(user_input: dict[str, Any]) -> str:
    """Ensure a connection can be established.

    Returns the computer name reported by the endpoint.
    """
    client = LibreHardwareMonitorClient(
        host=user_input[CONF_HOST],
        port=user_input[CONF_PORT],
        username=user_input.get(CONF_USERNAME),
        password=user_input.get(CONF_PASSWORD),
    )
    data = await client.get_data()
    return data.computer_name
class LibreHardwareMonitorConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for LibreHardwareMonitor."""

    VERSION = 2

    def __init__(self) -> None:
        """Init config flow."""
        # Connection details remembered between the user step and the auth
        # step when the endpoint turns out to require credentials.
        self._host: str | None = None
        self._port: int | None = None

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors: dict[str, str] = {}
        if user_input is not None:
            # Abort if an entry with identical data already exists.
            self._async_abort_entries_match(user_input)
            try:
                computer_name = await _validate_connection(user_input)
            except LibreHardwareMonitorConnectionError as exception:
                _LOGGER.error(exception)
                errors["base"] = "cannot_connect"
            except LibreHardwareMonitorUnauthorizedError:
                # Server requires credentials: remember host/port, ask for them.
                self._host = user_input[CONF_HOST]
                self._port = user_input[CONF_PORT]
                return await self.async_step_reauth_confirm()
            except LibreHardwareMonitorNoDevicesError:
                errors["base"] = "no_devices"
            else:
                return self.async_create_entry(
                    title=f"{computer_name} ({user_input[CONF_HOST]}:{user_input[CONF_PORT]})",
                    data=user_input,
                )
        return self.async_show_form(
            step_id="user",
            data_schema=self.add_suggested_values_to_schema(CONFIG_SCHEMA, user_input),
            errors=errors,
        )

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Perform reauthentication upon an API authentication error."""
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Confirm (re)authentication dialog."""
        errors: dict[str, str] = {}
        # we use this step both for initial auth and for re-auth
        reauth_entry: ConfigEntry | None = None
        if self.source == SOURCE_REAUTH:
            reauth_entry = self._get_reauth_entry()
        if user_input:
            # Host/port come from the stored entry on re-auth, otherwise from
            # the values remembered during the user step.
            data = {
                CONF_HOST: reauth_entry.data[CONF_HOST] if reauth_entry else self._host,
                CONF_PORT: reauth_entry.data[CONF_PORT] if reauth_entry else self._port,
                **user_input,
            }
            try:
                computer_name = await _validate_connection(data)
            except LibreHardwareMonitorConnectionError as exception:
                _LOGGER.error(exception)
                errors["base"] = "cannot_connect"
            except LibreHardwareMonitorUnauthorizedError:
                errors["base"] = "invalid_auth"
            except LibreHardwareMonitorNoDevicesError:
                errors["base"] = "no_devices"
            else:
                if self.source == SOURCE_REAUTH:
                    return self.async_update_reload_and_abort(
                        entry=reauth_entry,  # type: ignore[arg-type]
                        data_updates=user_input,
                    )
                # the initial connection was unauthorized, now we can create the config entry
                return self.async_create_entry(
                    title=f"{computer_name} ({self._host}:{self._port})",
                    data=data,
                )
        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=self.add_suggested_values_to_schema(
                REAUTH_SCHEMA,
                {
                    # Pre-fill the username from the submitted form or, on
                    # re-auth, from the stored entry data.
                    CONF_USERNAME: user_input[CONF_USERNAME]
                    if user_input is not None
                    else reauth_entry.data.get(CONF_USERNAME)
                    if reauth_entry is not None
                    else None
                },
            ),
            errors=errors,
        )
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/libre_hardware_monitor/config_flow.py",
"license": "Apache License 2.0",
"lines": 131,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/libre_hardware_monitor/const.py | """Constants for the LibreHardwareMonitor integration."""
DOMAIN = "libre_hardware_monitor"

# Default connection settings for a LibreHardwareMonitor web server.
DEFAULT_HOST = "localhost"
DEFAULT_PORT = 8085
# Polling interval in seconds.
DEFAULT_SCAN_INTERVAL = 10
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/libre_hardware_monitor/const.py",
"license": "Apache License 2.0",
"lines": 5,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/libre_hardware_monitor/coordinator.py | """Coordinator for LibreHardwareMonitor integration."""
from __future__ import annotations
from datetime import timedelta
import logging
from librehardwaremonitor_api import (
LibreHardwareMonitorClient,
LibreHardwareMonitorConnectionError,
LibreHardwareMonitorNoDevicesError,
LibreHardwareMonitorUnauthorizedError,
)
from librehardwaremonitor_api.model import (
DeviceId,
DeviceName,
LibreHardwareMonitorData,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_HOST, CONF_PASSWORD, CONF_PORT, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers import device_registry as dr, issue_registry as ir
from homeassistant.helpers.aiohttp_client import async_create_clientsession
from homeassistant.helpers.device_registry import DeviceEntry
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN
_LOGGER = logging.getLogger(__name__)
# Config entry whose runtime_data holds the coordinator instance.
type LibreHardwareMonitorConfigEntry = ConfigEntry[LibreHardwareMonitorCoordinator]
class LibreHardwareMonitorCoordinator(DataUpdateCoordinator[LibreHardwareMonitorData]):
    """Class to manage fetching LibreHardwareMonitor data."""

    config_entry: LibreHardwareMonitorConfigEntry

    def __init__(
        self, hass: HomeAssistant, config_entry: LibreHardwareMonitorConfigEntry
    ) -> None:
        """Initialize."""
        super().__init__(
            hass,
            _LOGGER,
            name=DOMAIN,
            config_entry=config_entry,
            update_interval=timedelta(seconds=DEFAULT_SCAN_INTERVAL),
        )
        self._entry_id = config_entry.entry_id
        self._api = LibreHardwareMonitorClient(
            host=config_entry.data[CONF_HOST],
            port=config_entry.data[CONF_PORT],
            username=config_entry.data.get(CONF_USERNAME),
            password=config_entry.data.get(CONF_PASSWORD),
            session=async_create_clientsession(hass),
        )
        device_entries: list[DeviceEntry] = dr.async_entries_for_config_entry(
            registry=dr.async_get(self.hass), config_entry_id=self._entry_id
        )
        # Seed the known-devices map from the registry so devices that vanished
        # while HA was down are detected on the first refresh. Keyed by the
        # identifier value (second element of the (DOMAIN, id) tuple).
        self._previous_devices: dict[DeviceId, DeviceName] = {
            DeviceId(next(iter(device.identifiers))[1]): DeviceName(device.name)
            for device in device_entries
            if device.identifiers and device.name
        }
        # None until the first update reports whether the server is deprecated.
        self._is_deprecated_version: bool | None = None

    async def _async_update_data(self) -> LibreHardwareMonitorData:
        """Fetch data, translating client errors into coordinator errors."""
        try:
            lhm_data = await self._api.get_data()
        except LibreHardwareMonitorConnectionError as err:
            raise UpdateFailed(
                "LibreHardwareMonitor connection failed, will retry", retry_after=25
            ) from err
        except LibreHardwareMonitorUnauthorizedError as err:
            _LOGGER.error("Authentication against LibreHardwareMonitor instance failed")
            raise ConfigEntryAuthFailed("Authentication failed") from err
        except LibreHardwareMonitorNoDevicesError as err:
            raise UpdateFailed("No sensor data available, will retry") from err
        # Check whether user has upgraded LHM from a deprecated version while the integration is running
        if self._is_deprecated_version and not lhm_data.is_deprecated_version:
            # Clear deprecation issue
            ir.async_delete_issue(self.hass, DOMAIN, f"deprecated_api_{self._entry_id}")
        self._is_deprecated_version = lhm_data.is_deprecated_version
        await self._async_handle_changes_in_devices(
            dict(lhm_data.main_device_ids_and_names)
        )
        return lhm_data

    async def _async_refresh(
        self,
        log_failures: bool = True,
        raise_on_auth_failed: bool = False,
        scheduled: bool = False,
        raise_on_entry_error: bool = False,
    ) -> None:
        """Refresh data, always passing log_failures=False to the base class."""
        # we don't expect the computer to be online 24/7 so we don't want to log a connection loss as an error
        await super()._async_refresh(
            False, raise_on_auth_failed, scheduled, raise_on_entry_error
        )

    async def _async_handle_changes_in_devices(
        self, detected_devices: dict[DeviceId, DeviceName]
    ) -> None:
        """Handle device changes by deleting devices from / adding devices to Home Assistant."""
        # Namespace the detected ids with the entry id so they match the
        # identifiers used in the device registry.
        detected_devices = {
            DeviceId(f"{self.config_entry.entry_id}_{detected_id}"): device_name
            for detected_id, device_name in detected_devices.items()
        }
        previous_device_ids = set(self._previous_devices.keys())
        detected_device_ids = set(detected_devices.keys())
        _LOGGER.debug("Previous device_ids: %s", previous_device_ids)
        _LOGGER.debug("Detected device_ids: %s", detected_device_ids)
        if previous_device_ids == detected_device_ids:
            return
        # Remove registry devices no longer reported by the server.
        if orphaned_devices := previous_device_ids - detected_device_ids:
            _LOGGER.warning(
                "Device(s) no longer available, will be removed: %s",
                [self._previous_devices[device_id] for device_id in orphaned_devices],
            )
            device_registry = dr.async_get(self.hass)
            for device_id in orphaned_devices:
                if device := device_registry.async_get_device(
                    identifiers={(DOMAIN, device_id)}
                ):
                    _LOGGER.debug(
                        "Removing device: %s", self._previous_devices[device_id]
                    )
                    device_registry.async_update_device(
                        device_id=device.id,
                        remove_config_entry_id=self.config_entry.entry_id,
                    )
        if self.data is None:
            # initial update during integration startup
            self._previous_devices = detected_devices  # type: ignore[unreachable]
            return
        # New devices only get entities after a reload, so warn the user.
        if new_devices := detected_device_ids - previous_device_ids:
            _LOGGER.warning(
                "New Device(s) detected, reload integration to add them to Home Assistant: %s",
                [detected_devices[DeviceId(device_id)] for device_id in new_devices],
            )
        self._previous_devices = detected_devices
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/libre_hardware_monitor/coordinator.py",
"license": "Apache License 2.0",
"lines": 130,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/libre_hardware_monitor/sensor.py | """Support for LibreHardwareMonitor Sensor Platform."""
from __future__ import annotations
from typing import Any
from librehardwaremonitor_api.model import LibreHardwareMonitorSensorData
from librehardwaremonitor_api.sensor_type import SensorType
from homeassistant.components.sensor import SensorEntity, SensorStateClass
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import LibreHardwareMonitorConfigEntry, LibreHardwareMonitorCoordinator
from .const import DOMAIN
# Updates are driven centrally by the coordinator; no parallel entity polling.
PARALLEL_UPDATES = 0

# Keys for the extra state attributes carrying the sensor's min/max values.
STATE_MIN_VALUE = "min_value"
STATE_MAX_VALUE = "max_value"
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: LibreHardwareMonitorConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the LibreHardwareMonitor platform."""
    coordinator = config_entry.runtime_data
    entities = [
        LibreHardwareMonitorSensor(coordinator, config_entry.entry_id, data)
        for data in coordinator.data.sensor_data.values()
    ]
    async_add_entities(entities)
class LibreHardwareMonitorSensor(
    CoordinatorEntity[LibreHardwareMonitorCoordinator], SensorEntity
):
    """Sensor to display information from LibreHardwareMonitor."""

    _attr_state_class = SensorStateClass.MEASUREMENT
    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: LibreHardwareMonitorCoordinator,
        entry_id: str,
        sensor_data: LibreHardwareMonitorSensorData,
    ) -> None:
        """Initialize an LibreHardwareMonitor sensor."""
        super().__init__(coordinator)
        self._attr_name: str = sensor_data.name
        self._set_state(coordinator.data.is_deprecated_version, sensor_data)
        self._attr_unique_id: str = f"{entry_id}_{sensor_data.sensor_id}"
        # Kept so the sensor can be looked up on every coordinator update.
        self._sensor_id: str = sensor_data.sensor_id
        # Hardware device
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, f"{entry_id}_{sensor_data.device_id}")},
            name=f"[{coordinator.data.computer_name}] {sensor_data.device_name}",
            model=sensor_data.device_type,
        )

    def _set_state(
        self,
        is_deprecated_lhm_version: bool,
        sensor_data: LibreHardwareMonitorSensorData,
    ) -> None:
        """Set value, min/max attributes and unit from the given sensor data."""
        value = sensor_data.value
        min_value = sensor_data.min
        max_value = sensor_data.max
        unit = sensor_data.unit
        if not is_deprecated_lhm_version and sensor_data.type == SensorType.THROUGHPUT:
            # Temporary fix: convert the B/s value to KB/s to not break existing entries
            # This will be migrated properly once SensorDeviceClass is introduced
            value = f"{(float(value) / 1024):.1f}" if value else None
            min_value = f"{(float(min_value) / 1024):.1f}" if min_value else None
            max_value = f"{(float(max_value) / 1024):.1f}" if max_value else None
            unit = "KB/s"
        self._attr_native_value: str | None = value
        self._attr_extra_state_attributes: dict[str, Any] = {
            STATE_MIN_VALUE: min_value,
            STATE_MAX_VALUE: max_value,
        }
        self._attr_native_unit_of_measurement = unit

    @callback
    def _handle_coordinator_update(self) -> None:
        """Handle updated data from the coordinator."""
        if sensor_data := self.coordinator.data.sensor_data.get(self._sensor_id):
            self._set_state(self.coordinator.data.is_deprecated_version, sensor_data)
        else:
            # Sensor missing from the latest payload: mark the value unknown.
            self._attr_native_value = None
        super()._handle_coordinator_update()
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/libre_hardware_monitor/sensor.py",
"license": "Apache License 2.0",
"lines": 80,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/media_source/helper.py | """Helpers for media source."""
from __future__ import annotations
from collections.abc import Callable
from homeassistant.components.media_player import BrowseError, BrowseMedia
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.frame import report_usage
from homeassistant.helpers.typing import UNDEFINED, UndefinedType
from homeassistant.loader import bind_hass
from .const import DOMAIN, MEDIA_SOURCE_DATA
from .error import UnknownMediaSource, Unresolvable
from .models import BrowseMediaSource, MediaSourceItem, PlayMedia
@callback
def _get_media_item(
    hass: HomeAssistant, media_content_id: str | None, target_media_player: str | None
) -> MediaSourceItem:
    """Return media item."""
    if not media_content_id:
        # With no content id, default to our own domain when it is the only
        # media source registered; otherwise leave the domain unset.
        only_own_source = len(hass.data[MEDIA_SOURCE_DATA]) <= 1
        domain = DOMAIN if only_own_source else None
        return MediaSourceItem(hass, domain, "", target_media_player)
    item = MediaSourceItem.from_uri(hass, media_content_id, target_media_player)
    if item.domain is not None and item.domain not in hass.data[MEDIA_SOURCE_DATA]:
        raise UnknownMediaSource(
            translation_domain=DOMAIN,
            translation_key="unknown_media_source",
            translation_placeholders={"domain": item.domain},
        )
    return item
@bind_hass
async def async_browse_media(
    hass: HomeAssistant,
    media_content_id: str | None,
    *,
    content_filter: Callable[[BrowseMedia], bool] | None = None,
) -> BrowseMediaSource:
    """Return media player browse media results."""
    if DOMAIN not in hass.data:
        raise BrowseError("Media Source not loaded")
    try:
        media_item = _get_media_item(hass, media_content_id, None)
        item = await media_item.async_browse()
    except ValueError as err:
        raise BrowseError(
            translation_domain=DOMAIN,
            translation_key="browse_media_failed",
            translation_placeholders={
                "media_content_id": str(media_content_id),
                "error": str(err),
            },
        ) from err
    if content_filter is None or item.children is None:
        return item
    # Keep expandable children plus any leaf the filter accepts; everything
    # else is dropped and counted in not_shown.
    kept = [child for child in item.children if child.can_expand or content_filter(child)]
    item.not_shown += len(item.children) - len(kept)
    item.children = kept
    return item
@bind_hass
async def async_resolve_media(
    hass: HomeAssistant,
    media_content_id: str,
    target_media_player: str | None | UndefinedType = UNDEFINED,
) -> PlayMedia:
    """Get info to play media."""
    if DOMAIN not in hass.data:
        raise Unresolvable("Media Source not loaded")
    if target_media_player is UNDEFINED:
        report_usage(
            "calls media_source.async_resolve_media without passing an entity_id",
            exclude_integrations={DOMAIN},
        )
        target_media_player = None
    try:
        source_item = _get_media_item(hass, media_content_id, target_media_player)
    except ValueError as err:
        placeholders = {
            "media_content_id": str(media_content_id),
            "error": str(err),
        }
        raise Unresolvable(
            translation_domain=DOMAIN,
            translation_key="resolve_media_failed",
            translation_placeholders=placeholders,
        ) from err
    return await source_item.async_resolve()
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/media_source/helper.py",
"license": "Apache License 2.0",
"lines": 85,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/media_source/http.py | """HTTP views and WebSocket commands for media sources."""
from __future__ import annotations
from typing import Any
import voluptuous as vol
from homeassistant.components import frontend, websocket_api
from homeassistant.components.media_player import (
ATTR_MEDIA_CONTENT_ID,
CONTENT_AUTH_EXPIRY_TIME,
BrowseError,
async_process_play_media_url,
)
from homeassistant.components.websocket_api import ActiveConnection
from homeassistant.core import HomeAssistant
from .error import Unresolvable
from .helper import async_browse_media, async_resolve_media
def async_setup(hass: HomeAssistant) -> None:
    """Set up the HTTP views and WebSocket commands for media sources."""
    for command in (websocket_browse_media, websocket_resolve_media):
        websocket_api.async_register_command(hass, command)
    frontend.async_register_built_in_panel(
        hass, "media-browser", "media_browser", "mdi:play-box-multiple"
    )
@websocket_api.websocket_command(
    {
        vol.Required("type"): "media_source/browse_media",
        vol.Optional(ATTR_MEDIA_CONTENT_ID, default=""): str,
    }
)
@websocket_api.async_response
async def websocket_browse_media(
    hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
) -> None:
    """Browse available media."""
    content_id = msg.get("media_content_id", "")
    try:
        media = await async_browse_media(hass, content_id)
        connection.send_result(msg["id"], media.as_dict())
    except BrowseError as err:
        connection.send_error(msg["id"], "browse_media_failed", str(err))
@websocket_api.websocket_command(
    {
        vol.Required("type"): "media_source/resolve_media",
        vol.Required(ATTR_MEDIA_CONTENT_ID): str,
        vol.Optional("expires", default=CONTENT_AUTH_EXPIRY_TIME): int,
    }
)
@websocket_api.async_response
async def websocket_resolve_media(
    hass: HomeAssistant, connection: ActiveConnection, msg: dict[str, Any]
) -> None:
    """Resolve media."""
    try:
        media = await async_resolve_media(hass, msg["media_content_id"], None)
    except Unresolvable as err:
        connection.send_error(msg["id"], "resolve_media_failed", str(err))
        return
    url = async_process_play_media_url(hass, media.url, allow_relative_url=True)
    connection.send_result(msg["id"], {"url": url, "mime_type": media.mime_type})
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/media_source/http.py",
"license": "Apache License 2.0",
"lines": 67,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/nederlandse_spoorwegen/config_flow.py | """Config flow for Nederlandse Spoorwegen integration."""
from __future__ import annotations
import logging
from typing import Any
from ns_api import NSAPI, Station
from requests.exceptions import (
ConnectionError as RequestsConnectionError,
HTTPError,
Timeout,
)
import voluptuous as vol
from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
ConfigSubentryData,
ConfigSubentryFlow,
SubentryFlowResult,
)
from homeassistant.const import CONF_API_KEY, CONF_NAME
from homeassistant.core import callback
from homeassistant.helpers.selector import (
SelectOptionDict,
SelectSelector,
SelectSelectorConfig,
TimeSelector,
)
from .const import (
CONF_FROM,
CONF_ROUTES,
CONF_TIME,
CONF_TO,
CONF_VIA,
DOMAIN,
INTEGRATION_TITLE,
)
_LOGGER = logging.getLogger(__name__)
class NSConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Nederlandse Spoorwegen."""

    VERSION = 1
    MINOR_VERSION = 1

    async def _validate_api_key(self, api_key: str) -> dict[str, str]:
        """Validate the API key by testing connection to NS API.

        Returns a dict of errors, empty if validation successful.
        """
        errors: dict[str, str] = {}
        client = NSAPI(api_key)
        try:
            await self.hass.async_add_executor_job(client.get_stations)
        except HTTPError:
            errors["base"] = "invalid_auth"
        # Fix: catching multiple exception types requires a parenthesized
        # tuple; `except A, B:` is Python 2 syntax and a SyntaxError in
        # Python 3.
        except (RequestsConnectionError, Timeout):
            errors["base"] = "cannot_connect"
        except Exception:
            _LOGGER.exception("Unexpected exception validating API key")
            errors["base"] = "unknown"
        return errors

    def _is_api_key_already_configured(
        self, api_key: str, exclude_entry_id: str | None = None
    ) -> dict[str, str]:
        """Check if the API key is already configured in another entry.

        Args:
            api_key: The API key to check.
            exclude_entry_id: Optional entry ID to exclude from the check.

        Returns:
            A dict of errors, empty if not already configured.
        """
        for entry in self._async_current_entries():
            if (
                entry.entry_id != exclude_entry_id
                and entry.data.get(CONF_API_KEY) == api_key
            ):
                return {"base": "already_configured"}
        return {}

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step of the config flow (API key)."""
        errors: dict[str, str] = {}
        if user_input is not None:
            self._async_abort_entries_match(user_input)
            errors = await self._validate_api_key(user_input[CONF_API_KEY])
            if not errors:
                return self.async_create_entry(
                    title=INTEGRATION_TITLE,
                    data={CONF_API_KEY: user_input[CONF_API_KEY]},
                )
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}),
            errors=errors,
        )

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle reconfiguration to update the API key from the UI."""
        errors: dict[str, str] = {}
        reconfigure_entry = self._get_reconfigure_entry()
        if user_input is not None:
            # Check if this API key is already used by another entry
            errors = self._is_api_key_already_configured(
                user_input[CONF_API_KEY], exclude_entry_id=reconfigure_entry.entry_id
            )
            if not errors:
                errors = await self._validate_api_key(user_input[CONF_API_KEY])
            if not errors:
                return self.async_update_reload_and_abort(
                    reconfigure_entry,
                    data_updates={CONF_API_KEY: user_input[CONF_API_KEY]},
                )
        return self.async_show_form(
            step_id="reconfigure",
            data_schema=vol.Schema({vol.Required(CONF_API_KEY): str}),
            errors=errors,
        )

    async def async_step_import(self, import_data: dict[str, Any]) -> ConfigFlowResult:
        """Handle import from YAML configuration."""
        self._async_abort_entries_match({CONF_API_KEY: import_data[CONF_API_KEY]})
        client = NSAPI(import_data[CONF_API_KEY])
        try:
            stations = await self.hass.async_add_executor_job(client.get_stations)
        except HTTPError:
            return self.async_abort(reason="invalid_auth")
        # Fix: same Python 2 `except A, B:` SyntaxError as in _validate_api_key.
        except (RequestsConnectionError, Timeout):
            return self.async_abort(reason="cannot_connect")
        except Exception:
            _LOGGER.exception("Unexpected exception validating API key")
            return self.async_abort(reason="unknown")
        station_codes = {station.code for station in stations}
        subentries: list[ConfigSubentryData] = []
        for route in import_data.get(CONF_ROUTES, []):
            # Convert station codes to uppercase for consistency with UI routes
            for key in (CONF_FROM, CONF_TO, CONF_VIA):
                if key in route:
                    route[key] = route[key].upper()
                    if route[key] not in station_codes:
                        return self.async_abort(reason="invalid_station")
            subentries.append(
                ConfigSubentryData(
                    title=route[CONF_NAME],
                    subentry_type="route",
                    data=route,
                    unique_id=None,
                )
            )
        return self.async_create_entry(
            title=INTEGRATION_TITLE,
            data={CONF_API_KEY: import_data[CONF_API_KEY]},
            subentries=subentries,
        )

    @classmethod
    @callback
    def async_get_supported_subentry_types(
        cls, config_entry: ConfigEntry
    ) -> dict[str, type[ConfigSubentryFlow]]:
        """Return subentries supported by this integration."""
        return {"route": RouteSubentryFlowHandler}
class RouteSubentryFlowHandler(ConfigSubentryFlow):
"""Handle subentry flow for adding and modifying routes."""
def __init__(self) -> None:
"""Initialize route subentry flow."""
self.stations: dict[str, Station] = {}
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> SubentryFlowResult:
"""Add a new route subentry."""
if user_input is not None:
return self.async_create_entry(title=user_input[CONF_NAME], data=user_input)
client = NSAPI(self._get_entry().data[CONF_API_KEY])
if not self.stations:
try:
self.stations = {
station.code: station
for station in await self.hass.async_add_executor_job(
client.get_stations
)
}
except RequestsConnectionError, Timeout, HTTPError, ValueError:
return self.async_abort(reason="cannot_connect")
options = [
SelectOptionDict(label=station.names["long"], value=code)
for code, station in self.stations.items()
]
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(CONF_NAME): str,
vol.Required(CONF_FROM): SelectSelector(
SelectSelectorConfig(options=options, sort=True),
),
vol.Required(CONF_TO): SelectSelector(
SelectSelectorConfig(options=options, sort=True),
),
vol.Optional(CONF_VIA): SelectSelector(
SelectSelectorConfig(options=options, sort=True),
),
vol.Optional(CONF_TIME): TimeSelector(),
}
),
)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/nederlandse_spoorwegen/config_flow.py",
"license": "Apache License 2.0",
"lines": 201,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/nederlandse_spoorwegen/const.py | """Constants for the Nederlandse Spoorwegen integration."""
from datetime import timedelta
from zoneinfo import ZoneInfo
DOMAIN = "nederlandse_spoorwegen"
INTEGRATION_TITLE = "Nederlandse Spoorwegen"
SUBENTRY_TYPE_ROUTE = "route"
ROUTE_MODEL = "Route"
# Europe/Amsterdam timezone for Dutch rail API expectations
AMS_TZ = ZoneInfo("Europe/Amsterdam")
# Update every 2 minutes
SCAN_INTERVAL = timedelta(minutes=2)
CONF_ROUTES = "routes"
CONF_FROM = "from"
CONF_TO = "to"
CONF_VIA = "via"
CONF_TIME = "time"
# Attribute and schema keys
ATTR_ROUTE = "route"
ATTR_TRIPS = "trips"
ATTR_FIRST_TRIP = "first_trip"
ATTR_NEXT_TRIP = "next_trip"
ATTR_ROUTES = "routes"
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/nederlandse_spoorwegen/const.py",
"license": "Apache License 2.0",
"lines": 22,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/ntfy/entity.py | """Base entity for ntfy integration."""
from __future__ import annotations
from yarl import URL
from homeassistant.config_entries import ConfigSubentry
from homeassistant.const import CONF_URL
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity import Entity, EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import CONF_TOPIC, DOMAIN
from .coordinator import BaseDataUpdateCoordinator, NtfyConfigEntry
class NtfyBaseEntity(Entity):
"""Base entity."""
_attr_has_entity_name = True
_attr_should_poll = False
def __init__(
self,
config_entry: NtfyConfigEntry,
subentry: ConfigSubentry,
) -> None:
"""Initialize the entity."""
self.topic = subentry.data[CONF_TOPIC]
self._attr_unique_id = f"{config_entry.entry_id}_{subentry.subentry_id}_{self.entity_description.key}"
self._attr_device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
manufacturer="ntfy LLC",
model="ntfy",
name=subentry.title,
configuration_url=URL(config_entry.data[CONF_URL]) / self.topic,
identifiers={(DOMAIN, f"{config_entry.entry_id}_{subentry.subentry_id}")},
via_device=(DOMAIN, config_entry.entry_id),
)
self.ntfy = config_entry.runtime_data.account.ntfy
self.config_entry = config_entry
self.subentry = subentry
class NtfyCommonBaseEntity(CoordinatorEntity[BaseDataUpdateCoordinator]):
"""Base entity for common entities."""
_attr_has_entity_name = True
def __init__(
self,
coordinator: BaseDataUpdateCoordinator,
description: EntityDescription,
) -> None:
"""Initialize entity."""
super().__init__(coordinator)
self.entity_description = description
self._attr_unique_id = f"{coordinator.config_entry.entry_id}_{description.key}"
self._attr_device_info = DeviceInfo(
entry_type=DeviceEntryType.SERVICE,
manufacturer="ntfy LLC",
model="ntfy",
configuration_url=URL(coordinator.config_entry.data[CONF_URL]) / "app",
identifiers={(DOMAIN, coordinator.config_entry.entry_id)},
)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/ntfy/entity.py",
"license": "Apache License 2.0",
"lines": 53,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/ntfy/event.py | """Event platform for ntfy integration."""
from __future__ import annotations
import asyncio
import logging
from typing import TYPE_CHECKING
from aiontfy import Event, Notification
from aiontfy.exceptions import (
NtfyConnectionError,
NtfyForbiddenError,
NtfyHTTPError,
NtfyTimeoutError,
)
from homeassistant.components.event import EventEntity, EventEntityDescription
from homeassistant.config_entries import ConfigSubentry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers import issue_registry as ir
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import (
CONF_MESSAGE,
CONF_PRIORITY,
CONF_TAGS,
CONF_TITLE,
CONF_TOPIC,
DOMAIN,
)
from .coordinator import NtfyConfigEntry
from .entity import NtfyBaseEntity
_LOGGER = logging.getLogger(__name__)
PARALLEL_UPDATES = 0
RECONNECT_INTERVAL = 10
async def async_setup_entry(
hass: HomeAssistant,
config_entry: NtfyConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up the event platform."""
for subentry_id, subentry in config_entry.subentries.items():
async_add_entities(
[NtfyEventEntity(config_entry, subentry)], config_subentry_id=subentry_id
)
class NtfyEventEntity(NtfyBaseEntity, EventEntity):
"""An event entity."""
entity_description = EventEntityDescription(
key="subscribe",
translation_key="subscribe",
name=None,
event_types=["triggered"],
)
def __init__(
self,
config_entry: NtfyConfigEntry,
subentry: ConfigSubentry,
) -> None:
"""Initialize the entity."""
super().__init__(config_entry, subentry)
self._ws: asyncio.Task | None = None
@callback
def _async_handle_event(self, notification: Notification) -> None:
"""Handle the ntfy event."""
if notification.topic == self.topic and notification.event is Event.MESSAGE:
event = (
f"{notification.title}: {notification.message}"
if notification.title
else notification.message
)
if TYPE_CHECKING:
assert event
self._attr_event_types = [event]
self._trigger_event(event, notification.to_dict())
self.async_write_ha_state()
async def async_added_to_hass(self) -> None:
"""Run when entity about to be added to hass."""
self.config_entry.async_create_background_task(
self.hass,
self.ws_connect(),
"websocket_watchdog",
)
async def ws_connect(self) -> None:
"""Connect websocket."""
while True:
try:
if self._ws and (exc := self._ws.exception()):
raise exc # noqa: TRY301
except asyncio.InvalidStateError:
self._attr_available = True
except asyncio.CancelledError:
self._attr_available = False
return
except NtfyForbiddenError:
if self._attr_available:
_LOGGER.error(
"Failed to subscribe to topic %s. Topic is protected",
self.topic,
)
self._attr_available = False
ir.async_create_issue(
self.hass,
DOMAIN,
f"topic_protected_{self.topic}",
is_fixable=True,
severity=ir.IssueSeverity.ERROR,
translation_key="topic_protected",
translation_placeholders={CONF_TOPIC: self.topic},
data={"entity_id": self.entity_id, "topic": self.topic},
)
return
except NtfyHTTPError as e:
if self._attr_available:
_LOGGER.error(
"Failed to connect to ntfy service due to a server error: %s (%s)",
e.error,
e.link,
)
self._attr_available = False
except NtfyConnectionError:
if self._attr_available:
_LOGGER.error(
"Failed to connect to ntfy service due to a connection error"
)
self._attr_available = False
except NtfyTimeoutError:
if self._attr_available:
_LOGGER.error(
"Failed to connect to ntfy service due to a connection timeout"
)
self._attr_available = False
except Exception:
if self._attr_available:
_LOGGER.exception(
"Failed to connect to ntfy service due to an unexpected exception"
)
self._attr_available = False
finally:
self.async_write_ha_state()
if self._ws is None or self._ws.done():
self._ws = self.config_entry.async_create_background_task(
self.hass,
target=self.ntfy.subscribe(
topics=[self.topic],
callback=self._async_handle_event,
title=self.subentry.data.get(CONF_TITLE),
message=self.subentry.data.get(CONF_MESSAGE),
priority=self.subentry.data.get(CONF_PRIORITY),
tags=self.subentry.data.get(CONF_TAGS),
),
name="ntfy_websocket",
)
await asyncio.sleep(RECONNECT_INTERVAL)
@property
def entity_picture(self) -> str | None:
"""Return the entity picture to use in the frontend, if any."""
return self.state_attributes.get("icon") or super().entity_picture
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/ntfy/event.py",
"license": "Apache License 2.0",
"lines": 152,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/ntfy/repairs.py | """Repairs for ntfy integration."""
from __future__ import annotations
import voluptuous as vol
from homeassistant import data_entry_flow
from homeassistant.components.repairs import ConfirmRepairFlow, RepairsFlow
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from .const import CONF_TOPIC
class TopicProtectedRepairFlow(RepairsFlow):
"""Handler for protected topic issue fixing flow."""
def __init__(self, data: dict[str, str]) -> None:
"""Initialize."""
self.entity_id = data["entity_id"]
self.topic = data["topic"]
async def async_step_init(
self, user_input: dict[str, str] | None = None
) -> data_entry_flow.FlowResult:
"""Init repair flow."""
return await self.async_step_confirm()
async def async_step_confirm(
self, user_input: dict[str, str] | None = None
) -> data_entry_flow.FlowResult:
"""Confirm repair flow."""
if user_input is not None:
er.async_get(self.hass).async_update_entity(
self.entity_id,
disabled_by=er.RegistryEntryDisabler.USER,
)
return self.async_create_entry(data={})
return self.async_show_form(
step_id="confirm",
data_schema=vol.Schema({}),
description_placeholders={CONF_TOPIC: self.topic},
)
async def async_create_fix_flow(
hass: HomeAssistant,
issue_id: str,
data: dict[str, str],
) -> RepairsFlow:
"""Create flow."""
if issue_id.startswith("topic_protected"):
return TopicProtectedRepairFlow(data)
return ConfirmRepairFlow()
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/ntfy/repairs.py",
"license": "Apache License 2.0",
"lines": 43,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/opnsense/const.py | """Constants for OPNsense component."""
DOMAIN = "opnsense"
OPNSENSE_DATA = DOMAIN
CONF_API_SECRET = "api_secret"
CONF_INTERFACE_CLIENT = "interface_client"
CONF_TRACKER_INTERFACES = "tracker_interfaces"
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/opnsense/const.py",
"license": "Apache License 2.0",
"lines": 6,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/portainer/binary_sensor.py | """Binary sensor platform for Portainer."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from homeassistant.components.binary_sensor import (
BinarySensorDeviceClass,
BinarySensorEntity,
BinarySensorEntityDescription,
)
from homeassistant.const import EntityCategory
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import PortainerConfigEntry
from .const import CONTAINER_STATE_RUNNING, STACK_STATUS_ACTIVE
from .coordinator import PortainerContainerData
from .entity import (
PortainerContainerEntity,
PortainerCoordinatorData,
PortainerEndpointEntity,
PortainerStackData,
PortainerStackEntity,
)
PARALLEL_UPDATES = 1
@dataclass(frozen=True, kw_only=True)
class PortainerContainerBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Class to hold Portainer container binary sensor description."""
state_fn: Callable[[PortainerContainerData], bool | None]
@dataclass(frozen=True, kw_only=True)
class PortainerEndpointBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Class to hold Portainer endpoint binary sensor description."""
state_fn: Callable[[PortainerCoordinatorData], bool | None]
@dataclass(frozen=True, kw_only=True)
class PortainerStackBinarySensorEntityDescription(BinarySensorEntityDescription):
"""Class to hold Portainer stack binary sensor description."""
state_fn: Callable[[PortainerStackData], bool | None]
CONTAINER_SENSORS: tuple[PortainerContainerBinarySensorEntityDescription, ...] = (
PortainerContainerBinarySensorEntityDescription(
key="status",
translation_key="status",
state_fn=lambda data: data.container.state == CONTAINER_STATE_RUNNING,
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
),
)
ENDPOINT_SENSORS: tuple[PortainerEndpointBinarySensorEntityDescription, ...] = (
PortainerEndpointBinarySensorEntityDescription(
key="status",
translation_key="status",
state_fn=lambda data: data.endpoint.status == 1, # 1 = Running | 2 = Stopped
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
),
)
STACK_SENSORS: tuple[PortainerStackBinarySensorEntityDescription, ...] = (
PortainerStackBinarySensorEntityDescription(
key="stack_status",
translation_key="status",
state_fn=lambda data: (
data.stack.status == STACK_STATUS_ACTIVE
), # 1 = Active | 2 = Inactive
device_class=BinarySensorDeviceClass.RUNNING,
entity_category=EntityCategory.DIAGNOSTIC,
),
)
async def async_setup_entry(
hass: HomeAssistant,
entry: PortainerConfigEntry,
async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
"""Set up Portainer binary sensors."""
coordinator = entry.runtime_data
def _async_add_new_endpoints(endpoints: list[PortainerCoordinatorData]) -> None:
"""Add new endpoint binary sensors."""
async_add_entities(
PortainerEndpointSensor(
coordinator,
entity_description,
endpoint,
)
for entity_description in ENDPOINT_SENSORS
for endpoint in endpoints
if entity_description.state_fn(endpoint)
)
def _async_add_new_containers(
containers: list[tuple[PortainerCoordinatorData, PortainerContainerData]],
) -> None:
"""Add new container binary sensors."""
async_add_entities(
PortainerContainerSensor(
coordinator,
entity_description,
container,
endpoint,
)
for (endpoint, container) in containers
for entity_description in CONTAINER_SENSORS
if entity_description.state_fn(container)
)
def _async_add_new_stacks(
stacks: list[tuple[PortainerCoordinatorData, PortainerStackData]],
) -> None:
"""Add new stack sensors."""
async_add_entities(
PortainerStackSensor(
coordinator,
entity_description,
stack,
endpoint,
)
for (endpoint, stack) in stacks
for entity_description in STACK_SENSORS
)
coordinator.new_endpoints_callbacks.append(_async_add_new_endpoints)
coordinator.new_containers_callbacks.append(_async_add_new_containers)
coordinator.new_stacks_callbacks.append(_async_add_new_stacks)
_async_add_new_endpoints(
[
endpoint
for endpoint in coordinator.data.values()
if endpoint.id in coordinator.known_endpoints
]
)
_async_add_new_containers(
[
(endpoint, container)
for endpoint in coordinator.data.values()
for container in endpoint.containers.values()
]
)
_async_add_new_stacks(
[
(endpoint, stack)
for endpoint in coordinator.data.values()
for stack in endpoint.stacks.values()
]
)
class PortainerEndpointSensor(PortainerEndpointEntity, BinarySensorEntity):
"""Representation of a Portainer endpoint binary sensor entity."""
entity_description: PortainerEndpointBinarySensorEntityDescription
@property
def is_on(self) -> bool | None:
"""Return true if the binary sensor is on."""
return self.entity_description.state_fn(self.coordinator.data[self.device_id])
class PortainerContainerSensor(PortainerContainerEntity, BinarySensorEntity):
"""Representation of a Portainer container sensor."""
entity_description: PortainerContainerBinarySensorEntityDescription
@property
def is_on(self) -> bool | None:
"""Return true if the binary sensor is on."""
return self.entity_description.state_fn(self.container_data)
class PortainerStackSensor(PortainerStackEntity, BinarySensorEntity):
"""Representation of a Portainer stack sensor."""
entity_description: PortainerStackBinarySensorEntityDescription
@property
def is_on(self) -> bool | None:
"""Return true if the binary sensor is on."""
return self.entity_description.state_fn(self.stack_data)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/portainer/binary_sensor.py",
"license": "Apache License 2.0",
"lines": 157,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/portainer/config_flow.py | """Config flow for the portainer integration."""
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
from pyportainer import (
Portainer,
PortainerAuthenticationError,
PortainerConnectionError,
PortainerTimeoutError,
)
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_API_TOKEN, CONF_URL, CONF_VERIFY_SSL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from .const import DOMAIN
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema(
{
vol.Required(CONF_URL): str,
vol.Required(CONF_API_TOKEN): str,
vol.Optional(CONF_VERIFY_SSL, default=True): bool,
}
)
async def _validate_input(hass: HomeAssistant, data: dict[str, Any]) -> None:
"""Validate the user input allows us to connect."""
client = Portainer(
api_url=data[CONF_URL],
api_key=data[CONF_API_TOKEN],
session=async_get_clientsession(hass=hass, verify_ssl=data[CONF_VERIFY_SSL]),
)
try:
await client.get_endpoints()
except PortainerAuthenticationError:
raise InvalidAuth from None
except PortainerConnectionError as err:
raise CannotConnect from err
except PortainerTimeoutError as err:
raise PortainerTimeout from err
_LOGGER.debug("Connected to Portainer API: %s", data[CONF_URL])
class PortainerConfigFlow(ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Portainer."""
VERSION = 4
async def async_step_user(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle the initial step."""
errors: dict[str, str] = {}
if user_input is not None:
self._async_abort_entries_match({CONF_URL: user_input[CONF_URL]})
try:
await _validate_input(self.hass, user_input)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except PortainerTimeout:
errors["base"] = "timeout_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(user_input[CONF_API_TOKEN])
self._abort_if_unique_id_configured()
return self.async_create_entry(
title=user_input[CONF_URL], data=user_input
)
return self.async_show_form(
step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
)
async def async_step_reauth(
self, entry_data: Mapping[str, Any]
) -> ConfigFlowResult:
"""Perform reauth when Portainer API authentication fails."""
return await self.async_step_reauth_confirm()
async def async_step_reauth_confirm(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reauth: ask for new API token and validate."""
errors: dict[str, str] = {}
reauth_entry = self._get_reauth_entry()
if user_input is not None:
try:
await _validate_input(
self.hass,
data={
**reauth_entry.data,
CONF_API_TOKEN: user_input[CONF_API_TOKEN],
},
)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except PortainerTimeout:
errors["base"] = "timeout_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
return self.async_update_reload_and_abort(
reauth_entry,
data_updates={CONF_API_TOKEN: user_input[CONF_API_TOKEN]},
)
return self.async_show_form(
step_id="reauth_confirm",
data_schema=vol.Schema({vol.Required(CONF_API_TOKEN): str}),
errors=errors,
)
async def async_step_reconfigure(
self, user_input: dict[str, Any] | None = None
) -> ConfigFlowResult:
"""Handle reconfiguration of the integration."""
errors: dict[str, str] = {}
reconf_entry = self._get_reconfigure_entry()
suggested_values = {
CONF_URL: reconf_entry.data[CONF_URL],
CONF_API_TOKEN: reconf_entry.data[CONF_API_TOKEN],
CONF_VERIFY_SSL: reconf_entry.data[CONF_VERIFY_SSL],
}
if user_input:
try:
await _validate_input(
self.hass,
data={
**reconf_entry.data,
**user_input,
},
)
except CannotConnect:
errors["base"] = "cannot_connect"
except InvalidAuth:
errors["base"] = "invalid_auth"
except PortainerTimeout:
errors["base"] = "timeout_connect"
except Exception:
_LOGGER.exception("Unexpected exception")
errors["base"] = "unknown"
else:
await self.async_set_unique_id(user_input[CONF_API_TOKEN])
self._abort_if_unique_id_configured()
return self.async_update_reload_and_abort(
reconf_entry,
data_updates={
CONF_URL: user_input[CONF_URL],
CONF_API_TOKEN: user_input[CONF_API_TOKEN],
CONF_VERIFY_SSL: user_input[CONF_VERIFY_SSL],
},
)
return self.async_show_form(
step_id="reconfigure",
data_schema=self.add_suggested_values_to_schema(
data_schema=STEP_USER_DATA_SCHEMA,
suggested_values=user_input or suggested_values,
),
errors=errors,
)
class CannotConnect(HomeAssistantError):
"""Error to indicate we cannot connect."""
class InvalidAuth(HomeAssistantError):
"""Error to indicate there is invalid auth."""
class PortainerTimeout(HomeAssistantError):
"""Error to indicate a timeout occurred."""
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/portainer/config_flow.py",
"license": "Apache License 2.0",
"lines": 165,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/portainer/const.py | """Constants for the Portainer integration."""
DOMAIN = "portainer"
DEFAULT_NAME = "Portainer"
ENDPOINT_STATUS_DOWN = 2
CONTAINER_STATE_RUNNING = "running"
STACK_STATUS_ACTIVE = 1
STACK_STATUS_INACTIVE = 2
STACK_TYPE_SWARM = 1
STACK_TYPE_COMPOSE = 2
STACK_TYPE_KUBERNETES = 3
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/portainer/const.py",
"license": "Apache License 2.0",
"lines": 10,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/portainer/coordinator.py | """Data Update Coordinator for Portainer."""
from __future__ import annotations
import asyncio
from collections.abc import Callable
from dataclasses import dataclass
from datetime import timedelta
import logging
from pyportainer import (
Portainer,
PortainerAuthenticationError,
PortainerConnectionError,
PortainerTimeoutError,
)
from pyportainer.models.docker import (
DockerContainer,
DockerContainerStats,
DockerSystemDF,
)
from pyportainer.models.docker_inspect import DockerInfo, DockerVersion
from pyportainer.models.portainer import Endpoint
from pyportainer.models.stacks import Stack
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_URL
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed, ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import CONTAINER_STATE_RUNNING, DOMAIN, ENDPOINT_STATUS_DOWN
type PortainerConfigEntry = ConfigEntry[PortainerCoordinator]
_LOGGER = logging.getLogger(__name__)
DEFAULT_SCAN_INTERVAL = timedelta(seconds=60)
@dataclass
class PortainerCoordinatorData:
"""Data class for Portainer Coordinator."""
id: int
name: str | None
endpoint: Endpoint
containers: dict[str, PortainerContainerData]
docker_version: DockerVersion
docker_info: DockerInfo
docker_system_df: DockerSystemDF
stacks: dict[str, PortainerStackData]
@dataclass(slots=True)
class PortainerContainerData:
"""Container data held by the Portainer coordinator."""
container: DockerContainer
stats: DockerContainerStats | None
stats_pre: DockerContainerStats | None
stack: Stack | None
@dataclass(slots=True)
class PortainerStackData:
"""Stack data held by the Portainer coordinator."""
stack: Stack
container_count: int = 0
class PortainerCoordinator(DataUpdateCoordinator[dict[int, PortainerCoordinatorData]]):
"""Data Update Coordinator for Portainer."""
config_entry: PortainerConfigEntry
def __init__(
self,
hass: HomeAssistant,
config_entry: PortainerConfigEntry,
portainer: Portainer,
) -> None:
"""Initialize the Portainer Data Update Coordinator."""
super().__init__(
hass,
_LOGGER,
config_entry=config_entry,
name=DOMAIN,
update_interval=DEFAULT_SCAN_INTERVAL,
)
self.portainer = portainer
self.known_endpoints: set[int] = set()
self.known_containers: set[tuple[int, str]] = set()
self.known_stacks: set[tuple[int, str]] = set()
self.new_endpoints_callbacks: list[
Callable[[list[PortainerCoordinatorData]], None]
] = []
self.new_containers_callbacks: list[
Callable[
[list[tuple[PortainerCoordinatorData, PortainerContainerData]]], None
]
] = []
self.new_stacks_callbacks: list[
Callable[[list[tuple[PortainerCoordinatorData, PortainerStackData]]], None]
] = []
async def _async_setup(self) -> None:
"""Set up the Portainer Data Update Coordinator."""
try:
await self.portainer.get_endpoints()
except PortainerAuthenticationError as err:
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
translation_placeholders={"error": repr(err)},
) from err
except PortainerConnectionError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="cannot_connect",
translation_placeholders={"error": repr(err)},
) from err
except PortainerTimeoutError as err:
raise ConfigEntryNotReady(
translation_domain=DOMAIN,
translation_key="timeout_connect",
translation_placeholders={"error": repr(err)},
) from err
async def _async_update_data(self) -> dict[int, PortainerCoordinatorData]:
"""Fetch data from Portainer API."""
_LOGGER.debug(
"Fetching data from Portainer API: %s", self.config_entry.data[CONF_URL]
)
try:
endpoints = await self.portainer.get_endpoints()
except PortainerAuthenticationError as err:
_LOGGER.error("Authentication error: %s", repr(err))
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
translation_placeholders={"error": repr(err)},
) from err
except PortainerConnectionError as err:
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="cannot_connect",
translation_placeholders={"error": repr(err)},
) from err
mapped_endpoints: dict[int, PortainerCoordinatorData] = {}
for endpoint in endpoints:
if endpoint.status == ENDPOINT_STATUS_DOWN:
_LOGGER.debug(
"Skipping offline endpoint: %s (ID: %d)",
endpoint.name,
endpoint.id,
)
continue
try:
(
containers,
docker_version,
docker_info,
docker_system_df,
stacks,
) = await asyncio.gather(
self.portainer.get_containers(endpoint.id),
self.portainer.docker_version(endpoint.id),
self.portainer.docker_info(endpoint.id),
self.portainer.docker_system_df(endpoint.id),
self.portainer.get_stacks(endpoint.id),
)
prev_endpoint = self.data.get(endpoint.id) if self.data else None
container_map: dict[str, PortainerContainerData] = {}
stack_map: dict[str, PortainerStackData] = {
stack.name: PortainerStackData(stack=stack, container_count=0)
for stack in stacks
}
# Map containers, started and stopped
for container in containers:
container_name = self._get_container_name(container.names[0])
prev_container = (
prev_endpoint.containers.get(container_name)
if prev_endpoint
else None
)
# Check if container belongs to a stack via docker compose label
stack_name: str | None = (
container.labels.get("com.docker.compose.project")
if container.labels
else None
)
if stack_name and (stack_data := stack_map.get(stack_name)):
stack_data.container_count += 1
container_map[container_name] = PortainerContainerData(
container=container,
stats=None,
stats_pre=prev_container.stats if prev_container else None,
stack=stack_map[stack_name].stack
if stack_name and stack_name in stack_map
else None,
)
# Separately fetch stats for running containers
running_containers = [
container
for container in containers
if container.state == CONTAINER_STATE_RUNNING
]
if running_containers:
container_stats = dict(
zip(
(
self._get_container_name(container.names[0])
for container in running_containers
),
await asyncio.gather(
*(
self.portainer.container_stats(
endpoint_id=endpoint.id,
container_id=container.id,
)
for container in running_containers
)
),
strict=False,
)
)
# Now assign stats to the containers
for container_name, stats in container_stats.items():
container_map[container_name].stats = stats
except PortainerConnectionError as err:
_LOGGER.exception("Connection error")
raise UpdateFailed(
translation_domain=DOMAIN,
translation_key="cannot_connect",
translation_placeholders={"error": repr(err)},
) from err
except PortainerAuthenticationError as err:
_LOGGER.exception("Authentication error")
raise ConfigEntryAuthFailed(
translation_domain=DOMAIN,
translation_key="invalid_auth",
translation_placeholders={"error": repr(err)},
) from err
mapped_endpoints[endpoint.id] = PortainerCoordinatorData(
id=endpoint.id,
name=endpoint.name,
endpoint=endpoint,
containers=container_map,
docker_version=docker_version,
docker_info=docker_info,
docker_system_df=docker_system_df,
stacks=stack_map,
)
self._async_add_remove_endpoints(mapped_endpoints)
return mapped_endpoints
def _async_add_remove_endpoints(
self, mapped_endpoints: dict[int, PortainerCoordinatorData]
) -> None:
"""Add new endpoints, remove non-existing endpoints."""
current_endpoints = {endpoint.id for endpoint in mapped_endpoints.values()}
new_endpoints = current_endpoints - self.known_endpoints
if new_endpoints:
_LOGGER.debug("New endpoints found: %s", new_endpoints)
self.known_endpoints.update(new_endpoints)
# Surprise, we also handle containers here :)
current_containers = {
(endpoint.id, container_name)
for endpoint in mapped_endpoints.values()
for container_name in endpoint.containers
}
new_containers = current_containers - self.known_containers
if new_containers:
_LOGGER.debug("New containers found: %s", new_containers)
self.known_containers.update(new_containers)
# Stack management
current_stacks = {
(endpoint.id, stack_name)
for endpoint in mapped_endpoints.values()
for stack_name in endpoint.stacks
}
new_stacks = current_stacks - self.known_stacks
if new_stacks:
_LOGGER.debug("New stacks found: %s", new_stacks)
self.known_stacks.update(new_stacks)
def _get_container_name(self, container_name: str) -> str:
"""Sanitize to get a proper container name."""
return container_name.replace("/", " ").strip()
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/portainer/coordinator.py",
"license": "Apache License 2.0",
"lines": 266,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/portainer/entity.py | """Base class for Portainer entities."""
from yarl import URL
from homeassistant.const import CONF_URL
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity import EntityDescription
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DEFAULT_NAME, DOMAIN
from .coordinator import (
PortainerContainerData,
PortainerCoordinator,
PortainerCoordinatorData,
PortainerStackData,
)
class PortainerCoordinatorEntity(CoordinatorEntity[PortainerCoordinator]):
    """Base class for Portainer entities."""

    # Every Portainer entity derives its friendly name from its device,
    # so enable the has_entity_name convention for all subclasses.
    _attr_has_entity_name = True
class PortainerEndpointEntity(PortainerCoordinatorEntity):
    """Base implementation for Portainer endpoint."""

    def __init__(
        self,
        coordinator: PortainerCoordinator,
        entity_description: EntityDescription,
        device_info: PortainerCoordinatorData,
    ) -> None:
        """Initialize a Portainer endpoint."""
        super().__init__(coordinator)
        self.entity_description = entity_description
        self._device_info = device_info
        self.device_id = device_info.endpoint.id

        entry = coordinator.config_entry
        # One service device per endpoint, linked back to the Portainer UI.
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, f"{entry.entry_id}_{self.device_id}")},
            configuration_url=URL(
                f"{entry.data[CONF_URL]}#!/{self.device_id}/docker/dashboard"
            ),
            manufacturer=DEFAULT_NAME,
            model="Endpoint",
            name=device_info.endpoint.name,
            entry_type=DeviceEntryType.SERVICE,
        )
        self._attr_unique_id = (
            f"{entry.entry_id}_{device_info.id}_{entity_description.key}"
        )

    @property
    def available(self) -> bool:
        """Return if the device is available."""
        return super().available and self.device_id in self.coordinator.data
class PortainerContainerEntity(PortainerCoordinatorEntity):
    """Base implementation for Portainer container."""

    def __init__(
        self,
        coordinator: PortainerCoordinator,
        entity_description: EntityDescription,
        device_info: PortainerContainerData,
        via_device: PortainerCoordinatorData,
    ) -> None:
        """Initialize a Portainer container."""
        super().__init__(coordinator)
        self.entity_description = entity_description
        self._device_info = device_info
        self.device_id = device_info.container.id
        self.endpoint_id = via_device.endpoint.id

        # Container IDs are ephemeral, so identify by the first container
        # name instead; per Docker's API docs the first name is unique.
        container_names = device_info.container.names
        assert container_names, "Container names list unexpectedly empty"
        self.device_name = container_names[0].replace("/", " ").strip()

        entry_id = coordinator.config_entry.entry_id
        # Nest under the stack device when the container belongs to a
        # stack, otherwise directly under the endpoint device.
        if device_info.stack:
            parent_identifier = (
                f"{entry_id}_{self.endpoint_id}_stack_{device_info.stack.id}"
            )
        else:
            parent_identifier = f"{entry_id}_{self.endpoint_id}"

        self._attr_device_info = DeviceInfo(
            identifiers={
                (DOMAIN, f"{entry_id}_{self.endpoint_id}_{self.device_name}")
            },
            manufacturer=DEFAULT_NAME,
            configuration_url=URL(
                f"{coordinator.config_entry.data[CONF_URL]}#!/{self.endpoint_id}/docker/containers/{self.device_id}"
            ),
            model="Container",
            name=self.device_name,
            via_device=(DOMAIN, parent_identifier),
            translation_key="unknown_container" if not self.device_name else None,
            entry_type=DeviceEntryType.SERVICE,
        )
        self._attr_unique_id = (
            f"{entry_id}_{self.device_name}_{entity_description.key}"
        )

    @property
    def available(self) -> bool:
        """Return if the device is available."""
        endpoint_data = self.coordinator.data.get(self.endpoint_id)
        return (
            super().available
            and endpoint_data is not None
            and self.device_name in endpoint_data.containers
        )

    @property
    def container_data(self) -> PortainerContainerData:
        """Return the coordinator data for this container."""
        return self.coordinator.data[self.endpoint_id].containers[self.device_name]
class PortainerStackEntity(PortainerCoordinatorEntity):
    """Base implementation for Portainer stack."""

    def __init__(
        self,
        coordinator: PortainerCoordinator,
        entity_description: EntityDescription,
        device_info: PortainerStackData,
        via_device: PortainerCoordinatorData,
    ) -> None:
        """Initialize a Portainer stack."""
        super().__init__(coordinator)
        self.entity_description = entity_description
        self._device_info = device_info
        self.stack_id = device_info.stack.id
        self.device_name = device_info.stack.name
        self.endpoint_id = via_device.endpoint.id
        self.endpoint_name = via_device.endpoint.name

        entry_id = coordinator.config_entry.entry_id
        stack_identifier = f"{entry_id}_{self.endpoint_id}_stack_{self.stack_id}"
        # Stack devices hang off their endpoint device via via_device.
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, stack_identifier)},
            manufacturer=DEFAULT_NAME,
            configuration_url=URL(
                f"{coordinator.config_entry.data[CONF_URL]}#!/{self.endpoint_id}/docker/stacks/{self.device_name}"
            ),
            model="Stack",
            name=self.device_name,
            via_device=(DOMAIN, f"{entry_id}_{self.endpoint_id}"),
        )
        self._attr_unique_id = (
            f"{entry_id}_{self.stack_id}_{entity_description.key}"
        )

    @property
    def available(self) -> bool:
        """Return if the stack is available."""
        endpoint_data = self.coordinator.data.get(self.endpoint_id)
        return (
            super().available
            and endpoint_data is not None
            and self.device_name in endpoint_data.stacks
        )

    @property
    def stack_data(self) -> PortainerStackData:
        """Return the coordinator data for this stack."""
        return self.coordinator.data[self.endpoint_id].stacks[self.device_name]
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/portainer/entity.py",
"license": "Apache License 2.0",
"lines": 152,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/route_b_smart_meter/config_flow.py | """Config flow for Smart Meter B Route integration."""
import logging
from typing import Any
from momonga import Momonga, MomongaSkJoinFailure, MomongaSkScanFailure
from serial.tools.list_ports import comports
from serial.tools.list_ports_common import ListPortInfo
import voluptuous as vol
from homeassistant.components.usb import get_serial_by_id, human_readable_device_name
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.const import CONF_DEVICE, CONF_ID, CONF_PASSWORD
from homeassistant.core import callback
from homeassistant.helpers.service_info.usb import UsbServiceInfo
from .const import DOMAIN, ENTRY_TITLE
_LOGGER = logging.getLogger(__name__)
def _validate_input(device: str, id: str, password: str) -> None:
    """Validate the user input allows us to connect.

    Opening and immediately closing a Momonga session is sufficient:
    any failure surfaces as a Momonga exception for the caller to map.
    """
    session = Momonga(dev=device, rbid=id, pwd=password)
    with session:
        pass
def _human_readable_device_name(port: UsbServiceInfo | ListPortInfo) -> str:
    """Return a human readable label for a serial port."""
    # vid/pid are numeric (or None); the helper wants strings or None.
    vid = str(port.vid) if port.vid else None
    pid = str(port.pid) if port.pid else None
    return human_readable_device_name(
        port.device,
        port.serial_number,
        port.manufacturer,
        port.description,
        vid,
        pid,
    )
class BRouteConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Smart Meter B Route."""

    VERSION = 1

    # USB device info used to pre-select the serial port in the form.
    # NOTE(review): presumably populated by a USB discovery step not
    # visible in this chunk — confirm against the full module.
    device: UsbServiceInfo | None = None

    @callback
    def _get_discovered_device_id_and_name(
        self, device_options: dict[str, ListPortInfo]
    ) -> tuple[str | None, str | None]:
        """Return the discovered device's serial-by-id and display name.

        Both elements are None when no device was discovered or the
        discovered device is not among the currently connected ports.
        """
        discovered_device_id = (
            get_serial_by_id(self.device.device) if self.device else None
        )
        discovered_device = (
            device_options.get(discovered_device_id) if discovered_device_id else None
        )
        discovered_device_name = (
            _human_readable_device_name(discovered_device)
            if discovered_device
            else None
        )
        return discovered_device_id, discovered_device_name

    async def _get_usb_devices(self) -> dict[str, ListPortInfo]:
        """Return a list of available USB devices."""
        # comports() does blocking serial enumeration — run in executor.
        devices = await self.hass.async_add_executor_job(comports)
        return {get_serial_by_id(port.device): port for port in devices}

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle the initial step."""
        errors: dict[str, str] = {}
        device_options = await self._get_usb_devices()
        if user_input is not None:
            try:
                # Opening a session validates port, B-route ID and password
                # together (blocking serial I/O, hence the executor).
                await self.hass.async_add_executor_job(
                    _validate_input,
                    user_input[CONF_DEVICE],
                    user_input[CONF_ID],
                    user_input[CONF_PASSWORD],
                )
            except MomongaSkScanFailure:
                # Scan failed: meter not reachable.
                errors["base"] = "cannot_connect"
            except MomongaSkJoinFailure:
                # Join failed: ID/password rejected.
                errors["base"] = "invalid_auth"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                # The B-route ID uniquely identifies the meter.
                await self.async_set_unique_id(
                    user_input[CONF_ID], raise_on_progress=False
                )
                self._abort_if_unique_id_configured()
                return self.async_create_entry(title=ENTRY_TITLE, data=user_input)
        discovered_device_id, discovered_device_name = (
            self._get_discovered_device_id_and_name(device_options)
        )
        return self.async_show_form(
            step_id="user",
            data_schema=vol.Schema(
                {
                    # With a discovered device, offer only that port;
                    # otherwise list every connected serial port.
                    vol.Required(CONF_DEVICE, default=discovered_device_id): vol.In(
                        {discovered_device_id: discovered_device_name}
                        if discovered_device_id and discovered_device_name
                        else {
                            name: _human_readable_device_name(device)
                            for name, device in device_options.items()
                        }
                    ),
                    vol.Required(CONF_ID): str,
                    vol.Required(CONF_PASSWORD): str,
                }
            ),
            errors=errors,
        )
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/route_b_smart_meter/config_flow.py",
"license": "Apache License 2.0",
"lines": 99,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/route_b_smart_meter/const.py | """Constants for the Smart Meter B Route integration."""
from datetime import timedelta
# Integration domain and the title used for created config entries.
DOMAIN = "route_b_smart_meter"
ENTRY_TITLE = "Route B Smart Meter"

# Default coordinator polling interval (5 minutes).
DEFAULT_SCAN_INTERVAL = timedelta(seconds=300)

# Keys identifying the sensor readings exposed by the integration.
ATTR_API_INSTANTANEOUS_POWER = "instantaneous_power"
ATTR_API_TOTAL_CONSUMPTION = "total_consumption"
ATTR_API_INSTANTANEOUS_CURRENT_T_PHASE = "instantaneous_current_t_phase"
ATTR_API_INSTANTANEOUS_CURRENT_R_PHASE = "instantaneous_current_r_phase"
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/route_b_smart_meter/const.py",
"license": "Apache License 2.0",
"lines": 9,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/route_b_smart_meter/coordinator.py | """DataUpdateCoordinator for the Smart Meter B-route integration."""
from dataclasses import dataclass
import logging
import time
from momonga import Momonga, MomongaError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_DEVICE, CONF_ID, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import DEFAULT_SCAN_INTERVAL, DOMAIN
_LOGGER = logging.getLogger(__name__)
@dataclass
class BRouteData:
    """Class for data of the B Route."""

    # Instantaneous current per phase, in amperes.
    instantaneous_current_r_phase: float
    instantaneous_current_t_phase: float
    # Instantaneous power, in watts.
    instantaneous_power: float
    # Cumulative energy consumption, in kWh.
    total_consumption: float


# Config entry whose runtime_data holds the update coordinator.
type BRouteConfigEntry = ConfigEntry[BRouteUpdateCoordinator]
@dataclass
class BRouteDeviceInfo:
    """Static device information fetched once at setup.

    Each field stays None when the corresponding read failed.
    """

    # Serial number reported by the meter.
    serial_number: str | None = None
    # Manufacturer code, stored as an uppercase hex string.
    manufacturer_code: str | None = None
    # ECHONET Lite standard version reported by the meter.
    echonet_version: str | None = None
class BRouteUpdateCoordinator(DataUpdateCoordinator[BRouteData]):
    """The B Route update coordinator."""

    # Static meter information, fetched once during _async_setup.
    device_info_data: BRouteDeviceInfo

    def __init__(
        self,
        hass: HomeAssistant,
        entry: BRouteConfigEntry,
    ) -> None:
        """Initialize."""
        self.device = entry.data[CONF_DEVICE]
        self.bid = entry.data[CONF_ID]
        self._password = entry.data[CONF_PASSWORD]
        # Session object only; the connection is opened in _async_setup.
        self.api = Momonga(dev=self.device, rbid=self.bid, pwd=self._password)
        super().__init__(
            hass,
            _LOGGER,
            name=DOMAIN,
            config_entry=entry,
            update_interval=DEFAULT_SCAN_INTERVAL,
        )
        self.device_info_data = BRouteDeviceInfo()

    async def _async_setup(self) -> None:
        """Open the connection and read static device info in the executor."""

        def fetch() -> None:
            # Blocking serial I/O: open the session, then read the
            # one-time device information while still off the event loop.
            self.api.open()
            self._fetch_device_info()

        await self.hass.async_add_executor_job(fetch)

    def _fetch_device_info(self) -> None:
        """Fetch static device information from the smart meter.

        Each read is best-effort: a failure is logged at debug level and
        the corresponding field is left as None. The sleeps space out
        consecutive requests by the API's internal transmit interval.
        """
        try:
            self.device_info_data.serial_number = self.api.get_serial_number()
        except MomongaError:
            _LOGGER.debug("Failed to fetch serial number", exc_info=True)
        time.sleep(self.api.internal_xmit_interval)
        try:
            raw = self.api.get_manufacturer_code()
            # Normalize the raw code bytes to an uppercase hex string.
            self.device_info_data.manufacturer_code = raw.hex().upper()
        except MomongaError:
            _LOGGER.debug("Failed to fetch manufacturer code", exc_info=True)
        time.sleep(self.api.internal_xmit_interval)
        try:
            self.device_info_data.echonet_version = self.api.get_standard_version()
        except MomongaError:
            _LOGGER.debug("Failed to fetch ECHONET Lite version", exc_info=True)

    def _get_data(self) -> BRouteData:
        """Get the data from API."""
        current = self.api.get_instantaneous_current()
        return BRouteData(
            instantaneous_current_r_phase=current["r phase current"],
            instantaneous_current_t_phase=current["t phase current"],
            instantaneous_power=self.api.get_instantaneous_power(),
            total_consumption=self.api.get_measured_cumulative_energy(),
        )

    async def _async_update_data(self) -> BRouteData:
        """Update data."""
        try:
            # Blocking serial I/O — run in the executor.
            return await self.hass.async_add_executor_job(self._get_data)
        except MomongaError as error:
            raise UpdateFailed(error) from error
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/route_b_smart_meter/coordinator.py",
"license": "Apache License 2.0",
"lines": 83,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/route_b_smart_meter/sensor.py | """Smart Meter B Route."""
from collections.abc import Callable
from dataclasses import dataclass
from typing import Literal
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import UnitOfElectricCurrent, UnitOfEnergy, UnitOfPower
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from . import BRouteConfigEntry
from .const import (
ATTR_API_INSTANTANEOUS_CURRENT_R_PHASE,
ATTR_API_INSTANTANEOUS_CURRENT_T_PHASE,
ATTR_API_INSTANTANEOUS_POWER,
ATTR_API_TOTAL_CONSUMPTION,
DOMAIN,
)
from .coordinator import BRouteData, BRouteUpdateCoordinator
@dataclass(frozen=True, kw_only=True)
class SensorEntityDescriptionWithValueAccessor(SensorEntityDescription):
    """Sensor entity description with data accessor."""

    # Callable extracting this sensor's value from the coordinator data.
    value_accessor: Callable[[BRouteData], StateType]
# One description per exposed meter reading; value_accessor maps the
# coordinator's BRouteData onto the sensor's native value.
SENSOR_DESCRIPTIONS = (
    SensorEntityDescriptionWithValueAccessor(
        key=ATTR_API_INSTANTANEOUS_CURRENT_R_PHASE,
        translation_key=ATTR_API_INSTANTANEOUS_CURRENT_R_PHASE,
        device_class=SensorDeviceClass.CURRENT,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
        value_accessor=lambda data: data.instantaneous_current_r_phase,
    ),
    SensorEntityDescriptionWithValueAccessor(
        key=ATTR_API_INSTANTANEOUS_CURRENT_T_PHASE,
        translation_key=ATTR_API_INSTANTANEOUS_CURRENT_T_PHASE,
        device_class=SensorDeviceClass.CURRENT,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfElectricCurrent.AMPERE,
        value_accessor=lambda data: data.instantaneous_current_t_phase,
    ),
    SensorEntityDescriptionWithValueAccessor(
        key=ATTR_API_INSTANTANEOUS_POWER,
        translation_key=ATTR_API_INSTANTANEOUS_POWER,
        device_class=SensorDeviceClass.POWER,
        state_class=SensorStateClass.MEASUREMENT,
        native_unit_of_measurement=UnitOfPower.WATT,
        value_accessor=lambda data: data.instantaneous_power,
    ),
    SensorEntityDescriptionWithValueAccessor(
        key=ATTR_API_TOTAL_CONSUMPTION,
        translation_key=ATTR_API_TOTAL_CONSUMPTION,
        device_class=SensorDeviceClass.ENERGY,
        state_class=SensorStateClass.TOTAL,
        native_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        value_accessor=lambda data: data.total_consumption,
    ),
)
# Optional DeviceInfo fields and how to read each one from the coordinator's
# statically fetched device information; entries resolving to None are
# skipped when building the device info.
_DEVICE_INFO_MAPPING: dict[
    Literal["manufacturer", "serial_number", "sw_version"],
    Callable[[BRouteUpdateCoordinator], str | None],
] = {
    "manufacturer": lambda coordinator: coordinator.device_info_data.manufacturer_code,
    "serial_number": lambda coordinator: coordinator.device_info_data.serial_number,
    "sw_version": lambda coordinator: coordinator.device_info_data.echonet_version,
}
def _build_device_info(coordinator: BRouteUpdateCoordinator) -> DeviceInfo:
    """Build device information from coordinator data."""
    info = DeviceInfo(
        identifiers={(DOMAIN, coordinator.bid)},
        name=f"Route B Smart Meter {coordinator.bid}",
    )
    # Only add the optional fields that were successfully fetched.
    info.update(
        {
            key: value
            for key, accessor in _DEVICE_INFO_MAPPING.items()
            if (value := accessor(coordinator)) is not None
        }
    )
    return info
async def async_setup_entry(
    hass: HomeAssistant,
    entry: BRouteConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Smart Meter B-route entry."""
    coordinator = entry.runtime_data
    entities = [
        SmartMeterBRouteSensor(coordinator, description)
        for description in SENSOR_DESCRIPTIONS
    ]
    async_add_entities(entities)
class SmartMeterBRouteSensor(CoordinatorEntity[BRouteUpdateCoordinator], SensorEntity):
    """Representation of a Smart Meter B-route sensor entity."""

    _attr_has_entity_name = True

    def __init__(
        self,
        coordinator: BRouteUpdateCoordinator,
        description: SensorEntityDescriptionWithValueAccessor,
    ) -> None:
        """Initialize Smart Meter B-route sensor entity."""
        super().__init__(coordinator)
        self.entity_description: SensorEntityDescriptionWithValueAccessor = description
        # Unique per meter (B-route ID) and reading kind.
        self._attr_device_info = _build_device_info(coordinator)
        self._attr_unique_id = f"{coordinator.bid}_{description.key}"

    @property
    def native_value(self) -> StateType:
        """Return the state of the sensor."""
        data = self.coordinator.data
        return self.entity_description.value_accessor(data)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/route_b_smart_meter/sensor.py",
"license": "Apache License 2.0",
"lines": 109,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/satel_integra/config_flow.py | """Config flow for Satel Integra."""
from __future__ import annotations
import logging
from typing import Any
from satel_integra.satel_integra import AsyncSatel
import voluptuous as vol
from homeassistant.components.binary_sensor import BinarySensorDeviceClass
from homeassistant.config_entries import (
ConfigEntry,
ConfigFlow,
ConfigFlowResult,
ConfigSubentryData,
ConfigSubentryFlow,
OptionsFlow,
SubentryFlowResult,
)
from homeassistant.const import CONF_CODE, CONF_HOST, CONF_NAME, CONF_PORT
from homeassistant.core import callback
from homeassistant.helpers import config_validation as cv, selector
from .const import (
CONF_ARM_HOME_MODE,
CONF_DEVICE_PARTITIONS,
CONF_OUTPUT_NUMBER,
CONF_OUTPUTS,
CONF_PARTITION_NUMBER,
CONF_SWITCHABLE_OUTPUT_NUMBER,
CONF_SWITCHABLE_OUTPUTS,
CONF_ZONE_NUMBER,
CONF_ZONE_TYPE,
CONF_ZONES,
DEFAULT_CONF_ARM_HOME_MODE,
DEFAULT_PORT,
DOMAIN,
SUBENTRY_TYPE_OUTPUT,
SUBENTRY_TYPE_PARTITION,
SUBENTRY_TYPE_SWITCHABLE_OUTPUT,
SUBENTRY_TYPE_ZONE,
)
from .coordinator import SatelConfigEntry
_LOGGER = logging.getLogger(__package__)
# Initial connection step: host/port of the alarm panel plus an optional
# alarm code (the code ends up in entry options, not data).
CONNECTION_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_HOST): str,
        vol.Required(CONF_PORT, default=DEFAULT_PORT): cv.port,
        vol.Optional(CONF_CODE): cv.string,
    }
)

# Options flow: only the (optional) alarm code can be changed.
CODE_SCHEMA = vol.Schema(
    {
        vol.Optional(CONF_CODE): cv.string,
    }
)

# Partition subentry: name plus the arm-home mode (1-3).
PARTITION_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_NAME): cv.string,
        vol.Required(CONF_ARM_HOME_MODE, default=DEFAULT_CONF_ARM_HOME_MODE): vol.In(
            [1, 2, 3]
        ),
    }
)

# Shared by zone and output subentries: name plus a binary-sensor device
# class chosen from a sorted dropdown.
ZONE_AND_OUTPUT_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_NAME): cv.string,
        vol.Required(
            CONF_ZONE_TYPE, default=BinarySensorDeviceClass.MOTION
        ): selector.SelectSelector(
            selector.SelectSelectorConfig(
                options=[cls.value for cls in BinarySensorDeviceClass],
                mode=selector.SelectSelectorMode.DROPDOWN,
                translation_key="binary_sensor_device_class",
                sort=True,
            ),
        ),
    }
)

# Switchable output subentry: just a name.
SWITCHABLE_OUTPUT_SCHEMA = vol.Schema({vol.Required(CONF_NAME): cv.string})
class SatelConfigFlow(ConfigFlow, domain=DOMAIN):
    """Handle a Satel Integra config flow."""

    VERSION = 2
    MINOR_VERSION = 1

    @staticmethod
    @callback
    def async_get_options_flow(
        config_entry: SatelConfigEntry,
    ) -> SatelOptionsFlow:
        """Create the options flow."""
        return SatelOptionsFlow()

    @classmethod
    @callback
    def async_get_supported_subentry_types(
        cls, config_entry: ConfigEntry
    ) -> dict[str, type[ConfigSubentryFlow]]:
        """Return subentries supported by this integration."""
        return {
            SUBENTRY_TYPE_PARTITION: PartitionSubentryFlowHandler,
            SUBENTRY_TYPE_ZONE: ZoneSubentryFlowHandler,
            SUBENTRY_TYPE_OUTPUT: OutputSubentryFlowHandler,
            SUBENTRY_TYPE_SWITCHABLE_OUTPUT: SwitchableOutputSubentryFlowHandler,
        }

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle a flow initialized by the user."""
        errors: dict[str, str] = {}

        if user_input is not None:
            # Only one config entry per panel host.
            self._async_abort_entries_match({CONF_HOST: user_input[CONF_HOST]})

            valid = await self.test_connection(
                user_input[CONF_HOST], user_input[CONF_PORT]
            )

            if valid:
                # Host/port go into entry data; the alarm code is stored in
                # options so it can be changed through the options flow.
                return self.async_create_entry(
                    title=user_input[CONF_HOST],
                    data={
                        CONF_HOST: user_input[CONF_HOST],
                        CONF_PORT: user_input[CONF_PORT],
                    },
                    options={CONF_CODE: user_input.get(CONF_CODE)},
                )

            errors["base"] = "cannot_connect"

        return self.async_show_form(
            step_id="user", data_schema=CONNECTION_SCHEMA, errors=errors
        )

    async def async_step_import(
        self, import_config: dict[str, Any]
    ) -> ConfigFlowResult:
        """Handle a flow initialized by import.

        Migrates a legacy YAML configuration: each configured partition,
        zone, output and switchable output becomes a config subentry whose
        unique_id encodes its type and device number.
        """
        valid = await self.test_connection(
            import_config[CONF_HOST], import_config.get(CONF_PORT, DEFAULT_PORT)
        )

        if valid:
            subentries: list[ConfigSubentryData] = []

            for partition_number, partition_data in import_config.get(
                CONF_DEVICE_PARTITIONS, {}
            ).items():
                subentries.append(
                    {
                        "subentry_type": SUBENTRY_TYPE_PARTITION,
                        "title": f"{partition_data[CONF_NAME]} ({partition_number})",
                        "unique_id": f"{SUBENTRY_TYPE_PARTITION}_{partition_number}",
                        "data": {
                            CONF_NAME: partition_data[CONF_NAME],
                            # Default arm-home mode when absent in YAML.
                            CONF_ARM_HOME_MODE: partition_data.get(
                                CONF_ARM_HOME_MODE, DEFAULT_CONF_ARM_HOME_MODE
                            ),
                            CONF_PARTITION_NUMBER: partition_number,
                        },
                    }
                )

            for zone_number, zone_data in import_config.get(CONF_ZONES, {}).items():
                subentries.append(
                    {
                        "subentry_type": SUBENTRY_TYPE_ZONE,
                        "title": f"{zone_data[CONF_NAME]} ({zone_number})",
                        "unique_id": f"{SUBENTRY_TYPE_ZONE}_{zone_number}",
                        "data": {
                            CONF_NAME: zone_data[CONF_NAME],
                            CONF_ZONE_NUMBER: zone_number,
                            # Zones default to a motion device class.
                            CONF_ZONE_TYPE: zone_data.get(
                                CONF_ZONE_TYPE, BinarySensorDeviceClass.MOTION
                            ),
                        },
                    }
                )

            for output_number, output_data in import_config.get(
                CONF_OUTPUTS, {}
            ).items():
                subentries.append(
                    {
                        "subentry_type": SUBENTRY_TYPE_OUTPUT,
                        "title": f"{output_data[CONF_NAME]} ({output_number})",
                        "unique_id": f"{SUBENTRY_TYPE_OUTPUT}_{output_number}",
                        "data": {
                            CONF_NAME: output_data[CONF_NAME],
                            CONF_OUTPUT_NUMBER: output_number,
                            CONF_ZONE_TYPE: output_data.get(
                                CONF_ZONE_TYPE, BinarySensorDeviceClass.MOTION
                            ),
                        },
                    }
                )

            for switchable_output_number, switchable_output_data in import_config.get(
                CONF_SWITCHABLE_OUTPUTS, {}
            ).items():
                subentries.append(
                    {
                        "subentry_type": SUBENTRY_TYPE_SWITCHABLE_OUTPUT,
                        "title": f"{switchable_output_data[CONF_NAME]} ({switchable_output_number})",
                        "unique_id": f"{SUBENTRY_TYPE_SWITCHABLE_OUTPUT}_{switchable_output_number}",
                        "data": {
                            CONF_NAME: switchable_output_data[CONF_NAME],
                            CONF_SWITCHABLE_OUTPUT_NUMBER: switchable_output_number,
                        },
                    }
                )

            return self.async_create_entry(
                title=import_config[CONF_HOST],
                data={
                    CONF_HOST: import_config[CONF_HOST],
                    CONF_PORT: import_config.get(CONF_PORT, DEFAULT_PORT),
                },
                options={CONF_CODE: import_config.get(CONF_CODE)},
                subentries=subentries,
            )

        return self.async_abort(reason="cannot_connect")

    async def test_connection(self, host: str, port: int) -> bool:
        """Test a connection to the Satel alarm."""
        controller = AsyncSatel(host, port, self.hass.loop)
        result = await controller.connect()
        # Make sure we close the connection again
        controller.close()
        return result
class SatelOptionsFlow(OptionsFlow):
    """Handle Satel options flow."""

    async def async_step_init(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Init step."""
        if user_input is None:
            # First pass: show the form pre-filled with the stored code.
            schema = self.add_suggested_values_to_schema(
                CODE_SCHEMA, self.config_entry.options
            )
            return self.async_show_form(step_id="init", data_schema=schema)
        return self.async_create_entry(data={CONF_CODE: user_input.get(CONF_CODE)})
class PartitionSubentryFlowHandler(ConfigSubentryFlow):
    """Handle subentry flow for adding and modifying a partition."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> SubentryFlowResult:
        """User flow to add new partition."""
        errors: dict[str, str] = {}
        if user_input is not None:
            number = user_input[CONF_PARTITION_NUMBER]
            # unique_id encodes the partition number to reject duplicates.
            unique_id = f"{SUBENTRY_TYPE_PARTITION}_{number}"
            if any(
                subentry.unique_id == unique_id
                for subentry in self._get_entry().subentries.values()
            ):
                errors[CONF_PARTITION_NUMBER] = "already_configured"
            else:
                return self.async_create_entry(
                    title=f"{user_input[CONF_NAME]} ({number})",
                    data=user_input,
                    unique_id=unique_id,
                )
        number_schema = vol.Schema(
            {
                vol.Required(CONF_PARTITION_NUMBER): vol.All(
                    vol.Coerce(int), vol.Range(min=1)
                ),
            }
        )
        return self.async_show_form(
            step_id="user",
            errors=errors,
            data_schema=number_schema.extend(PARTITION_SCHEMA.schema),
        )

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> SubentryFlowResult:
        """Reconfigure existing partition."""
        subentry = self._get_reconfigure_subentry()
        if user_input is None:
            return self.async_show_form(
                step_id="reconfigure",
                data_schema=self.add_suggested_values_to_schema(
                    PARTITION_SCHEMA,
                    subentry.data,
                ),
                description_placeholders={
                    CONF_PARTITION_NUMBER: subentry.data[CONF_PARTITION_NUMBER]
                },
            )
        # The partition number itself is immutable; only name/mode update.
        return self.async_update_and_abort(
            self._get_entry(),
            subentry,
            title=f"{user_input[CONF_NAME]} ({subentry.data[CONF_PARTITION_NUMBER]})",
            data_updates=user_input,
        )
class ZoneSubentryFlowHandler(ConfigSubentryFlow):
    """Handle subentry flow for adding and modifying a zone."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> SubentryFlowResult:
        """User flow to add new zone."""
        errors: dict[str, str] = {}
        if user_input is not None:
            # unique_id encodes the zone number so the same zone cannot
            # be added twice.
            unique_id = f"{SUBENTRY_TYPE_ZONE}_{user_input[CONF_ZONE_NUMBER]}"
            for existing_subentry in self._get_entry().subentries.values():
                if existing_subentry.unique_id == unique_id:
                    errors[CONF_ZONE_NUMBER] = "already_configured"
            if not errors:
                return self.async_create_entry(
                    title=f"{user_input[CONF_NAME]} ({user_input[CONF_ZONE_NUMBER]})",
                    data=user_input,
                    unique_id=unique_id,
                )
        return self.async_show_form(
            step_id="user",
            errors=errors,
            # The zone number is only asked for on add; name/type come
            # from the shared zone/output schema.
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_ZONE_NUMBER): vol.All(
                        vol.Coerce(int), vol.Range(min=1)
                    ),
                }
            ).extend(ZONE_AND_OUTPUT_SCHEMA.schema),
        )

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> SubentryFlowResult:
        """Reconfigure existing zone."""
        subconfig_entry = self._get_reconfigure_subentry()
        if user_input is not None:
            # Title keeps the (immutable) zone number from the stored data.
            return self.async_update_and_abort(
                self._get_entry(),
                subconfig_entry,
                title=f"{user_input[CONF_NAME]} ({subconfig_entry.data[CONF_ZONE_NUMBER]})",
                data_updates=user_input,
            )
        return self.async_show_form(
            step_id="reconfigure",
            data_schema=self.add_suggested_values_to_schema(
                ZONE_AND_OUTPUT_SCHEMA, subconfig_entry.data
            ),
            description_placeholders={
                CONF_ZONE_NUMBER: subconfig_entry.data[CONF_ZONE_NUMBER]
            },
        )
class OutputSubentryFlowHandler(ConfigSubentryFlow):
    """Handle subentry flow for adding and modifying an output."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> SubentryFlowResult:
        """User flow to add new output."""
        errors: dict[str, str] = {}
        if user_input is not None:
            # unique_id encodes the output number to reject duplicates.
            unique_id = f"{SUBENTRY_TYPE_OUTPUT}_{user_input[CONF_OUTPUT_NUMBER]}"
            for existing_subentry in self._get_entry().subentries.values():
                if existing_subentry.unique_id == unique_id:
                    errors[CONF_OUTPUT_NUMBER] = "already_configured"
            if not errors:
                return self.async_create_entry(
                    title=f"{user_input[CONF_NAME]} ({user_input[CONF_OUTPUT_NUMBER]})",
                    data=user_input,
                    unique_id=unique_id,
                )
        return self.async_show_form(
            step_id="user",
            errors=errors,
            # The output number is only asked for on add; name/type come
            # from the shared zone/output schema.
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_OUTPUT_NUMBER): vol.All(
                        vol.Coerce(int), vol.Range(min=1)
                    ),
                }
            ).extend(ZONE_AND_OUTPUT_SCHEMA.schema),
        )

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> SubentryFlowResult:
        """Reconfigure existing output."""
        subconfig_entry = self._get_reconfigure_subentry()
        if user_input is not None:
            # Title keeps the (immutable) output number from stored data.
            return self.async_update_and_abort(
                self._get_entry(),
                subconfig_entry,
                title=f"{user_input[CONF_NAME]} ({subconfig_entry.data[CONF_OUTPUT_NUMBER]})",
                data_updates=user_input,
            )
        return self.async_show_form(
            step_id="reconfigure",
            data_schema=self.add_suggested_values_to_schema(
                ZONE_AND_OUTPUT_SCHEMA, subconfig_entry.data
            ),
            description_placeholders={
                CONF_OUTPUT_NUMBER: subconfig_entry.data[CONF_OUTPUT_NUMBER]
            },
        )
class SwitchableOutputSubentryFlowHandler(ConfigSubentryFlow):
    """Handle subentry flow for adding and modifying a switchable output."""

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> SubentryFlowResult:
        """User flow to add new switchable output."""
        errors: dict[str, str] = {}
        if user_input is not None:
            number = user_input[CONF_SWITCHABLE_OUTPUT_NUMBER]
            # unique_id encodes the output number to reject duplicates.
            unique_id = f"{SUBENTRY_TYPE_SWITCHABLE_OUTPUT}_{number}"
            if any(
                subentry.unique_id == unique_id
                for subentry in self._get_entry().subentries.values()
            ):
                errors[CONF_SWITCHABLE_OUTPUT_NUMBER] = "already_configured"
            else:
                return self.async_create_entry(
                    title=f"{user_input[CONF_NAME]} ({number})",
                    data=user_input,
                    unique_id=unique_id,
                )
        number_schema = vol.Schema(
            {
                vol.Required(CONF_SWITCHABLE_OUTPUT_NUMBER): vol.All(
                    vol.Coerce(int), vol.Range(min=1)
                ),
            }
        )
        return self.async_show_form(
            step_id="user",
            errors=errors,
            data_schema=number_schema.extend(SWITCHABLE_OUTPUT_SCHEMA.schema),
        )

    async def async_step_reconfigure(
        self, user_input: dict[str, Any] | None = None
    ) -> SubentryFlowResult:
        """Reconfigure existing switchable output."""
        subentry = self._get_reconfigure_subentry()
        if user_input is None:
            return self.async_show_form(
                step_id="reconfigure",
                data_schema=self.add_suggested_values_to_schema(
                    SWITCHABLE_OUTPUT_SCHEMA, subentry.data
                ),
                description_placeholders={
                    CONF_SWITCHABLE_OUTPUT_NUMBER: subentry.data[
                        CONF_SWITCHABLE_OUTPUT_NUMBER
                    ]
                },
            )
        # The output number itself is immutable; only the name updates.
        return self.async_update_and_abort(
            self._get_entry(),
            subentry,
            title=f"{user_input[CONF_NAME]} ({subentry.data[CONF_SWITCHABLE_OUTPUT_NUMBER]})",
            data_updates=user_input,
        )
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/satel_integra/config_flow.py",
"license": "Apache License 2.0",
"lines": 428,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/satel_integra/const.py | """Constants for the Satel Integra integration."""
# Default values applied when the user does not override them.
DEFAULT_CONF_ARM_HOME_MODE = 1
DEFAULT_PORT = 7094
DEFAULT_ZONE_TYPE = "motion"
# Integration domain.
DOMAIN = "satel_integra"
# Config subentry types — one subentry per configured panel element.
SUBENTRY_TYPE_PARTITION = "partition"
SUBENTRY_TYPE_ZONE = "zone"
SUBENTRY_TYPE_OUTPUT = "output"
SUBENTRY_TYPE_SWITCHABLE_OUTPUT = "switchable_output"
# Keys used in config entry / subentry data.
CONF_PARTITION_NUMBER = "partition_number"
CONF_ZONE_NUMBER = "zone_number"
CONF_OUTPUT_NUMBER = "output_number"
CONF_SWITCHABLE_OUTPUT_NUMBER = "switchable_output_number"
CONF_DEVICE_PARTITIONS = "partitions"
CONF_ARM_HOME_MODE = "arm_home_mode"
CONF_ZONE_TYPE = "type"
CONF_ZONES = "zones"
CONF_OUTPUTS = "outputs"
CONF_SWITCHABLE_OUTPUTS = "switchable_outputs"
ZONES = "zones"
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/satel_integra/const.py",
"license": "Apache License 2.0",
"lines": 20,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/satel_integra/diagnostics.py | """Diagnostics support for Satel Integra."""
from __future__ import annotations
from typing import Any
from homeassistant.components.diagnostics import async_redact_data
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_CODE
from homeassistant.core import HomeAssistant
TO_REDACT = {CONF_CODE}
async def async_get_config_entry_diagnostics(
    hass: HomeAssistant, entry: ConfigEntry
) -> dict[str, Any]:
    """Return diagnostics for the config entry."""
    # Only the options can hold the alarm code (CONF_CODE), so only they
    # are passed through the redactor.
    return {
        "config_entry_data": dict(entry.data),
        "config_entry_options": async_redact_data(entry.options, TO_REDACT),
        "subentries": dict(entry.subentries),
    }
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/satel_integra/diagnostics.py",
"license": "Apache License 2.0",
"lines": 17,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/sftp_storage/backup.py | """Backup platform for the SFTP Storage integration."""
from __future__ import annotations
from collections.abc import AsyncIterator, Callable, Coroutine
from typing import Any
from asyncssh.sftp import SFTPError
from homeassistant.components.backup import (
AgentBackup,
BackupAgent,
BackupAgentError,
BackupNotFound,
)
from homeassistant.core import HomeAssistant, callback
from . import SFTPConfigEntry
from .client import BackupAgentClient
from .const import DATA_BACKUP_AGENT_LISTENERS, DOMAIN, LOGGER
async def async_get_backup_agents(
    hass: HomeAssistant,
) -> list[BackupAgent]:
    """Register the backup agents."""
    # One agent per loaded SFTP Storage config entry.
    return [
        SFTPBackupAgent(hass, entry)
        for entry in hass.config_entries.async_loaded_entries(DOMAIN)
    ]
@callback
def async_register_backup_agents_listener(
    hass: HomeAssistant,
    *,
    listener: Callable[[], None],
    **kwargs: Any,
) -> Callable[[], None]:
    """Register a listener to be called when agents are added or removed."""
    listeners = hass.data.setdefault(DATA_BACKUP_AGENT_LISTENERS, [])
    listeners.append(listener)

    @callback
    def remove_listener() -> None:
        """Remove the listener."""
        listeners.remove(listener)
        if not listeners:
            # Drop the hass.data key entirely once the last listener is gone.
            del hass.data[DATA_BACKUP_AGENT_LISTENERS]

    return remove_listener
class SFTPBackupAgent(BackupAgent):
    """SFTP Backup Storage agent.

    Adapts one SFTP Storage config entry to Home Assistant's backup agent
    interface; each operation opens a fresh `BackupAgentClient` session.
    """

    domain = DOMAIN

    def __init__(self, hass: HomeAssistant, entry: SFTPConfigEntry) -> None:
        """Initialize the SFTPBackupAgent backup sync agent."""
        super().__init__()
        self._entry: SFTPConfigEntry = entry
        self._hass: HomeAssistant = hass
        # Agent display name and unique id mirror the config entry.
        self.name: str = entry.title
        self.unique_id: str = entry.entry_id

    async def async_download_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AsyncIterator[bytes]:
        """Download a backup file from SFTP.

        Raises BackupNotFound if the archive or its metadata is missing.
        """
        LOGGER.debug(
            "Establishing SFTP connection to remote host in order to download backup id: %s",
            backup_id,
        )
        try:
            # Will raise BackupAgentError if failure to authenticate or SFTP Permissions
            async with BackupAgentClient(self._entry, self._hass) as client:
                # The returned iterator opens its own client session, so it
                # stays usable after this context manager closes.
                return await client.iter_file(backup_id)
        except FileNotFoundError as e:
            raise BackupNotFound(
                f"Unable to initiate download of backup id: {backup_id}. {e}"
            ) from e

    async def async_upload_backup(
        self,
        *,
        open_stream: Callable[[], Coroutine[Any, Any, AsyncIterator[bytes]]],
        backup: AgentBackup,
        **kwargs: Any,
    ) -> None:
        """Upload a backup archive and its metadata to the remote host."""
        LOGGER.debug("Received request to upload backup: %s", backup)
        iterator = await open_stream()
        LOGGER.debug(
            "Establishing SFTP connection to remote host in order to upload backup"
        )
        # Will raise BackupAgentError if failure to authenticate or SFTP Permissions
        async with BackupAgentClient(self._entry, self._hass) as client:
            LOGGER.debug("Uploading backup: %s", backup.backup_id)
            await client.async_upload_backup(iterator, backup)
        LOGGER.debug("Successfully uploaded backup id: %s", backup.backup_id)

    async def async_delete_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> None:
        """Delete a backup file from SFTP Storage.

        Raises BackupNotFound when the backup does not exist and
        BackupAgentError on other SFTP failures.
        """
        LOGGER.debug("Received request to delete backup id: %s", backup_id)
        try:
            LOGGER.debug(
                "Establishing SFTP connection to remote host in order to delete backup"
            )
            # Will raise BackupAgentError if failure to authenticate or SFTP Permissions
            async with BackupAgentClient(self._entry, self._hass) as client:
                await client.async_delete_backup(backup_id)
        except FileNotFoundError as err:
            # FileNotFoundError must be caught before SFTPError: the client
            # maps "missing archive/metadata" to FileNotFoundError itself.
            raise BackupNotFound(str(err)) from err
        except SFTPError as err:
            raise BackupAgentError(
                f"Failed to delete backup id: {backup_id}: {err}"
            ) from err
        LOGGER.debug("Successfully removed backup id: %s", backup_id)

    async def async_list_backups(self, **kwargs: Any) -> list[AgentBackup]:
        """List backups stored on SFTP Storage."""
        # Will raise BackupAgentError if failure to authenticate or SFTP Permissions
        async with BackupAgentClient(self._entry, self._hass) as client:
            try:
                return await client.async_list_backups()
            except SFTPError as err:
                raise BackupAgentError(
                    f"Remote server error while attempting to list backups: {err}"
                ) from err

    async def async_get_backup(
        self,
        backup_id: str,
        **kwargs: Any,
    ) -> AgentBackup:
        """Return a backup.

        Raises BackupNotFound when no stored backup matches `backup_id`.
        """
        # No per-id lookup on the remote side; scan the full listing.
        backups = await self.async_list_backups()
        for backup in backups:
            if backup.backup_id == backup_id:
                LOGGER.debug("Returning backup id: %s. %s", backup_id, backup)
                return backup
        raise BackupNotFound(f"Backup id: {backup_id} not found")
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/sftp_storage/backup.py",
"license": "Apache License 2.0",
"lines": 126,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/sftp_storage/client.py | """Client for SFTP Storage integration."""
from __future__ import annotations
from collections.abc import AsyncIterator
from dataclasses import dataclass
import json
from types import TracebackType
from typing import TYPE_CHECKING, Self
from asyncssh import (
SFTPClient,
SFTPClientFile,
SSHClientConnection,
SSHClientConnectionOptions,
connect,
)
from asyncssh.misc import PermissionDenied
from asyncssh.sftp import SFTPNoSuchFile, SFTPPermissionDenied
from homeassistant.components.backup import (
AgentBackup,
BackupAgentError,
suggested_filename,
)
from homeassistant.core import HomeAssistant
from .const import BUF_SIZE, LOGGER
if TYPE_CHECKING:
from . import SFTPConfigEntry, SFTPConfigEntryData
def get_client_options(cfg: SFTPConfigEntryData) -> SSHClientConnectionOptions:
    """Build `SSHClientConnectionOptions` from the config entry data.

    Constructing the options object can block, so invoke this via
    `hass.async_add_executor_job`.
    """
    credentials = {
        "username": cfg.username,
        "password": cfg.password,
        "client_keys": cfg.private_key_file,
    }
    # known_hosts=None disables host key checking.
    return SSHClientConnectionOptions(known_hosts=None, **credentials)
class AsyncFileIterator:
    """Returns iterator of remote file located in SFTP Server.

    This exists in order to properly close remote file after operation is completed
    and to avoid premature closing of file and session if `BackupAgentClient` is used
    as context manager.
    """

    # Both handles are created lazily by `_initialize()` on first iteration.
    _client: BackupAgentClient
    _fileobj: SFTPClientFile

    def __init__(
        self,
        cfg: SFTPConfigEntry,
        hass: HomeAssistant,
        file_path: str,
        buffer_size: int = BUF_SIZE,
    ) -> None:
        """Initialize `AsyncFileIterator`."""
        self.cfg: SFTPConfigEntry = cfg
        self.hass: HomeAssistant = hass
        self.file_path: str = file_path
        self.buffer_size = buffer_size
        # No connection is opened here; that happens on the first __anext__.
        self._initialized: bool = False
        LOGGER.debug("Opening file: %s in Async File Iterator", file_path)

    async def _initialize(self) -> None:
        """Load file object."""
        # Opens a dedicated client so the iterator owns (and later closes)
        # its own SSH/SFTP session, independent of the caller's session.
        self._client: BackupAgentClient = await BackupAgentClient(
            self.cfg, self.hass
        ).open()
        self._fileobj: SFTPClientFile = await self._client.sftp.open(
            self.file_path, "rb"
        )
        self._initialized = True

    def __aiter__(self) -> AsyncIterator[bytes]:
        """Return self as iterator."""
        return self

    async def __anext__(self) -> bytes:
        """Return next bytes as provided in buffer size."""
        if not self._initialized:
            await self._initialize()
        chunk: bytes = await self._fileobj.read(self.buffer_size)
        if not chunk:
            # End of file: close the remote handle and the session before
            # signalling exhaustion; StopAsyncIteration is raised even if
            # the cleanup itself fails.
            try:
                await self._fileobj.close()
                await self._client.close()
            finally:
                raise StopAsyncIteration
        return chunk
@dataclass(kw_only=True)
class BackupMetadata:
    """Represent single backup file metadata."""

    # Remote path of the backup archive on the SFTP server.
    file_path: str
    # Serialized `AgentBackup` dict, as written by `async_upload_backup`.
    metadata: dict[str, str | dict[str, list[str]]]
    # Remote path of the `.<backup_id>.metadata.json` sidecar file itself.
    metadata_file: str
class BackupAgentClient:
    """Helper class that manages SSH and SFTP Server connections."""

    # Set by `open()`; every other method assumes it is available.
    # NOTE(review): `__aexit__` reads `self.sftp` — it assumes `open()`
    # completed successfully; confirm no caller closes a half-opened client.
    sftp: SFTPClient

    def __init__(self, config: SFTPConfigEntry, hass: HomeAssistant) -> None:
        """Initialize `BackupAgentClient`."""
        self.cfg: SFTPConfigEntry = config
        self.hass: HomeAssistant = hass
        self._ssh: SSHClientConnection | None = None
        LOGGER.debug("Initialized with config: %s", self.cfg.runtime_data)

    async def __aenter__(self) -> Self:
        """Async context manager entrypoint."""
        return await self.open()  # type: ignore[return-value] # mypy will otherwise raise an error

    async def __aexit__(
        self,
        exc_type: type[BaseException] | None,
        exc: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        """Async Context Manager exit routine."""
        # Close the SFTP subsystem first, then the underlying SSH connection.
        if self.sftp:
            self.sftp.exit()
            await self.sftp.wait_closed()
        if self._ssh:
            self._ssh.close()
            await self._ssh.wait_closed()

    async def _load_metadata(self, backup_id: str) -> BackupMetadata:
        """Return `BackupMetadata` object.

        Raises:
        ------
        `FileNotFoundError` -- if metadata file is not found.
        """
        # Test for metadata file existence.
        metadata_file = (
            f"{self.cfg.runtime_data.backup_location}/.{backup_id}.metadata.json"
        )
        if not await self.sftp.exists(metadata_file):
            raise FileNotFoundError(
                f"Metadata file not found at remote location: {metadata_file}"
            )
        async with self.sftp.open(metadata_file, "r") as f:
            return BackupMetadata(
                **json.loads(await f.read()), metadata_file=metadata_file
            )

    async def async_delete_backup(self, backup_id: str) -> None:
        """Delete backup archive.

        Raises:
        ------
        `FileNotFoundError` -- if either metadata file or archive is not found.
        """
        metadata: BackupMetadata = await self._load_metadata(backup_id)
        # If for whatever reason, archive does not exist but metadata file does,
        # remove the metadata file.
        if not await self.sftp.exists(metadata.file_path):
            await self.sftp.unlink(metadata.metadata_file)
            raise FileNotFoundError(
                f"File at provided remote location: {metadata.file_path} does not exist."
            )
        LOGGER.debug("Removing file at path: %s", metadata.file_path)
        await self.sftp.unlink(metadata.file_path)
        LOGGER.debug("Removing metadata at path: %s", metadata.metadata_file)
        await self.sftp.unlink(metadata.metadata_file)

    async def async_list_backups(self) -> list[AgentBackup]:
        """Iterate through a list of metadata files and return a list of `AgentBackup` objects."""
        backups: list[AgentBackup] = []
        for file in await self.list_backup_location():
            LOGGER.debug(
                "Evaluating metadata file at remote location: %s@%s:%s",
                self.cfg.runtime_data.username,
                self.cfg.runtime_data.host,
                file,
            )
            try:
                async with self.sftp.open(file, "r") as rfile:
                    metadata = BackupMetadata(
                        **json.loads(await rfile.read()), metadata_file=file
                    )
                backups.append(AgentBackup.from_dict(metadata.metadata))
            except (json.JSONDecodeError, TypeError) as e:
                # Corrupt or incompatible metadata files are skipped, not fatal.
                LOGGER.error(
                    "Failed to load backup metadata from file: %s. %s", file, str(e)
                )
                continue
        return backups

    async def async_upload_backup(
        self,
        iterator: AsyncIterator[bytes],
        backup: AgentBackup,
    ) -> None:
        """Accept `iterator` as bytes iterator and write backup archive to SFTP Server."""
        file_path = (
            f"{self.cfg.runtime_data.backup_location}/{suggested_filename(backup)}"
        )
        async with self.sftp.open(file_path, "wb") as f:
            async for b in iterator:
                await f.write(b)
        LOGGER.debug("Writing backup metadata")
        # Sidecar file read back by `_load_metadata` / `async_list_backups`.
        metadata: dict[str, str | dict[str, list[str]]] = {
            "file_path": file_path,
            "metadata": backup.as_dict(),
        }
        async with self.sftp.open(
            f"{self.cfg.runtime_data.backup_location}/.{backup.backup_id}.metadata.json",
            "w",
        ) as f:
            await f.write(json.dumps(metadata))

    async def close(self) -> None:
        """Close the `BackupAgentClient` context manager."""
        await self.__aexit__(None, None, None)

    async def iter_file(self, backup_id: str) -> AsyncFileIterator:
        """Return Async File Iterator object.

        `SFTPClientFile` object (that would be returned with `sftp.open`) is not an iterator.
        So we return custom made class - `AsyncFileIterator` that would allow iteration on file object.

        Raises:
        ------
        - `FileNotFoundError` -- if metadata or backup archive is not found.
        """
        metadata: BackupMetadata = await self._load_metadata(backup_id)
        if not await self.sftp.exists(metadata.file_path):
            raise FileNotFoundError("Backup archive not found on remote location.")
        # The iterator opens its own session, so it outlives this client.
        return AsyncFileIterator(self.cfg, self.hass, metadata.file_path, BUF_SIZE)

    async def list_backup_location(self) -> list[str]:
        """Return a list of `*.metadata.json` files located in backup location."""
        files = []
        LOGGER.debug(
            "Changing directory to: `%s`", self.cfg.runtime_data.backup_location
        )
        await self.sftp.chdir(self.cfg.runtime_data.backup_location)
        for file in await self.sftp.listdir():
            LOGGER.debug(
                "Checking if file: `%s/%s` is metadata file",
                self.cfg.runtime_data.backup_location,
                file,
            )
            if file.endswith(".metadata.json"):
                LOGGER.debug("Found metadata file: `%s`", file)
                files.append(f"{self.cfg.runtime_data.backup_location}/{file}")
        return files

    async def open(self) -> BackupAgentClient:
        """Return initialized `BackupAgentClient`.

        This is to avoid calling `__aenter__` dunder method.
        """
        # Configure SSH Client Connection
        try:
            self._ssh = await connect(
                host=self.cfg.runtime_data.host,
                port=self.cfg.runtime_data.port,
                options=await self.hass.async_add_executor_job(
                    get_client_options, self.cfg.runtime_data
                ),
            )
        except (OSError, PermissionDenied) as e:
            raise BackupAgentError(
                "Failure while attempting to establish SSH connection. Please check SSH credentials and if changed, re-install the integration"
            ) from e
        # Configure SFTP Client Connection
        try:
            self.sftp = await self._ssh.start_sftp_client()
            await self.sftp.chdir(self.cfg.runtime_data.backup_location)
        except (SFTPNoSuchFile, SFTPPermissionDenied) as e:
            raise BackupAgentError(
                "Failed to create SFTP client. Re-installing integration might be required"
            ) from e
        return self
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/sftp_storage/client.py",
"license": "Apache License 2.0",
"lines": 248,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/sftp_storage/config_flow.py | """Config flow to configure the SFTP Storage integration."""
from __future__ import annotations
from contextlib import suppress
from pathlib import Path
import shutil
from typing import Any, cast
from asyncssh import KeyImportError, SSHClientConnectionOptions, connect
from asyncssh.misc import PermissionDenied
from asyncssh.sftp import SFTPNoSuchFile, SFTPPermissionDenied
import voluptuous as vol
from homeassistant.components.file_upload import process_uploaded_file
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.core import HomeAssistant
from homeassistant.helpers.selector import (
FileSelector,
FileSelectorConfig,
TextSelector,
TextSelectorConfig,
TextSelectorType,
)
from homeassistant.helpers.storage import STORAGE_DIR
from homeassistant.util.ulid import ulid
from . import SFTPConfigEntryData
from .client import get_client_options
from .const import (
CONF_BACKUP_LOCATION,
CONF_HOST,
CONF_PASSWORD,
CONF_PORT,
CONF_PRIVATE_KEY_FILE,
CONF_USERNAME,
DEFAULT_PKEY_NAME,
DOMAIN,
LOGGER,
)
# Form schema for the user step of the config flow.
DATA_SCHEMA = vol.Schema(
    {
        vol.Required(CONF_HOST): str,
        vol.Required(CONF_PORT, default=22): int,
        vol.Required(CONF_USERNAME): str,
        # Password and private key are both optional in the schema, but at
        # least one is required; that rule is enforced in the flow itself.
        vol.Optional(CONF_PASSWORD): TextSelector(
            config=TextSelectorConfig(type=TextSelectorType.PASSWORD)
        ),
        vol.Optional(CONF_PRIVATE_KEY_FILE): FileSelector(
            FileSelectorConfig(accept="*")
        ),
        vol.Required(CONF_BACKUP_LOCATION): str,
    }
)
class SFTPStorageException(Exception):
    """Base exception for SFTP Storage integration.

    Integration-specific errors derive from this class.
    """
class SFTPStorageInvalidPrivateKey(SFTPStorageException):
    """Exception raised during config flow - when user provided invalid private key file.

    Raised by `save_uploaded_pkey_file` when asyncssh fails to import the key.
    """
class SFTPStorageMissingPasswordOrPkey(SFTPStorageException):
    """Exception raised during config flow - when user did not provide password or private key file.

    Raised by `_validate_auth_and_save_keyfile` when neither credential is present.
    """
class SFTPFlowHandler(ConfigFlow, domain=DOMAIN):
    """Handle an SFTP Storage config flow."""

    def __init__(self) -> None:
        """Initialize SFTP Storage Flow Handler."""
        # NOTE(review): `_client_keys` is never read within this class —
        # confirm it is unused before removing.
        self._client_keys: list = []

    async def _validate_auth_and_save_keyfile(
        self, user_input: dict[str, Any]
    ) -> dict[str, Any]:
        """Validate authentication input and persist uploaded key file.

        Ensures that at least one of password or private key is provided. When a
        private key is supplied, the uploaded file is saved to Home Assistant's
        config storage and `user_input[CONF_PRIVATE_KEY_FILE]` is replaced with
        the stored path.

        Returns: the possibly updated `user_input`.

        Raises:
        - SFTPStorageMissingPasswordOrPkey: Neither password nor private key provided
        - SFTPStorageInvalidPrivateKey: The provided private key has an invalid format
        """
        # If neither password nor private key is provided, error out;
        # we need at least one to perform authentication.
        if not (user_input.get(CONF_PASSWORD) or user_input.get(CONF_PRIVATE_KEY_FILE)):
            raise SFTPStorageMissingPasswordOrPkey
        if key_file := user_input.get(CONF_PRIVATE_KEY_FILE):
            client_key = await save_uploaded_pkey_file(self.hass, cast(str, key_file))
            LOGGER.debug("Saved client key: %s", client_key)
            user_input[CONF_PRIVATE_KEY_FILE] = client_key
        return user_input

    async def async_step_user(
        self,
        user_input: dict[str, Any] | None = None,
        step_id: str = "user",
    ) -> ConfigFlowResult:
        """Handle a flow initiated by the user."""
        errors: dict[str, str] = {}
        placeholders: dict[str, str] = {}
        if user_input is not None:
            LOGGER.debug("Source: %s", self.source)
            # Abort if an entry for the same host/port/location already exists.
            self._async_abort_entries_match(
                {
                    CONF_HOST: user_input[CONF_HOST],
                    CONF_PORT: user_input[CONF_PORT],
                    CONF_BACKUP_LOCATION: user_input[CONF_BACKUP_LOCATION],
                }
            )
            # Reject relative paths early, before any connection attempt.
            if not user_input[CONF_BACKUP_LOCATION].startswith("/"):
                errors[CONF_BACKUP_LOCATION] = "backup_location_relative"
                return self.async_show_form(
                    step_id=step_id,
                    data_schema=self.add_suggested_values_to_schema(
                        DATA_SCHEMA, user_input
                    ),
                    description_placeholders=placeholders,
                    errors=errors,
                )
            try:
                # Validate auth input and save uploaded key file if provided
                user_input = await self._validate_auth_and_save_keyfile(user_input)
                # Create a session using your credentials
                user_config = SFTPConfigEntryData(
                    host=user_input[CONF_HOST],
                    port=user_input[CONF_PORT],
                    username=user_input[CONF_USERNAME],
                    password=user_input.get(CONF_PASSWORD),
                    private_key_file=user_input.get(CONF_PRIVATE_KEY_FILE),
                    backup_location=user_input[CONF_BACKUP_LOCATION],
                )
                placeholders["backup_location"] = user_config.backup_location
                # Raises:
                # - OSError, if host or port are not correct.
                # - SFTPStorageInvalidPrivateKey, if private key is not valid format.
                # - asyncssh.misc.PermissionDenied, if credentials are not correct.
                # - SFTPStorageMissingPasswordOrPkey, if password and private key are not provided.
                # - asyncssh.sftp.SFTPNoSuchFile, if directory does not exist.
                # - asyncssh.sftp.SFTPPermissionDenied, if we don't have access to said directory
                async with (
                    connect(
                        host=user_config.host,
                        port=user_config.port,
                        options=await self.hass.async_add_executor_job(
                            get_client_options, user_config
                        ),
                    ) as ssh,
                    ssh.start_sftp_client() as sftp,
                ):
                    # Prove we can both enter and list the backup directory.
                    await sftp.chdir(user_config.backup_location)
                    await sftp.listdir()
                # Fixed: arguments were previously (host, username), which
                # printed them swapped relative to the "user@host" label.
                LOGGER.debug(
                    "Will register SFTP Storage agent with user@host %s@%s",
                    user_config.username,
                    user_config.host,
                )
            except OSError as e:
                LOGGER.exception(e)
                placeholders["error_message"] = str(e)
                errors["base"] = "os_error"
            except SFTPStorageInvalidPrivateKey:
                errors["base"] = "invalid_key"
            except PermissionDenied as e:
                placeholders["error_message"] = str(e)
                errors["base"] = "permission_denied"
            except SFTPStorageMissingPasswordOrPkey:
                errors["base"] = "key_or_password_needed"
            except SFTPNoSuchFile:
                errors["base"] = "sftp_no_such_file"
            except SFTPPermissionDenied:
                errors["base"] = "sftp_permission_denied"
            except Exception as e:  # noqa: BLE001
                LOGGER.exception(e)
                placeholders["error_message"] = str(e)
                placeholders["exception"] = type(e).__name__
                errors["base"] = "unknown"
            else:
                return self.async_create_entry(
                    title=f"{user_config.username}@{user_config.host}",
                    data=user_input,
                )
            finally:
                # We remove the saved private key file if any error occurred.
                if errors and bool(user_input.get(CONF_PRIVATE_KEY_FILE)):
                    keyfile = Path(user_input[CONF_PRIVATE_KEY_FILE])
                    keyfile.unlink(missing_ok=True)
                    with suppress(OSError):
                        keyfile.parent.rmdir()
                # NOTE(review): this pop also runs on the success path, after
                # async_create_entry has captured the same dict — confirm the
                # stored entry still contains CONF_PRIVATE_KEY_FILE.
                if user_input:
                    user_input.pop(CONF_PRIVATE_KEY_FILE, None)
        return self.async_show_form(
            step_id=step_id,
            data_schema=self.add_suggested_values_to_schema(DATA_SCHEMA, user_input),
            description_placeholders=placeholders,
            errors=errors,
        )
async def save_uploaded_pkey_file(hass: HomeAssistant, uploaded_file_id: str) -> str:
    """Validate the uploaded private key and move it to the storage directory.

    Return a string representing a path to private key file.
    Raises SFTPStorageInvalidPrivateKey if the file is invalid.
    """

    def _validate_and_move() -> str:
        with process_uploaded_file(hass, uploaded_file_id) as file_path:
            try:
                # Initializing this will verify if private key is in correct format
                SSHClientConnectionOptions(client_keys=[file_path])
            except KeyImportError as err:
                LOGGER.debug(err)
                raise SFTPStorageInvalidPrivateKey from err

            target_dir = Path(hass.config.path(STORAGE_DIR, DOMAIN))
            # Create parent directory
            target_dir.mkdir(exist_ok=True)
            target_file = target_dir / f".{ulid()}_{DEFAULT_PKEY_NAME}"
            return str(shutil.move(file_path, target_file))

    # File validation and the move are blocking; run them in the executor.
    return await hass.async_add_executor_job(_validate_and_move)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/sftp_storage/config_flow.py",
"license": "Apache License 2.0",
"lines": 205,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/sftp_storage/const.py | """Constants for the SFTP Storage integration."""
from __future__ import annotations
from collections.abc import Callable
import logging
from typing import Final
from homeassistant.util.hass_dict import HassKey
DOMAIN: Final = "sftp_storage"
LOGGER = logging.getLogger(__package__)
# Keys used in the config entry data.
CONF_HOST: Final = "host"
CONF_PORT: Final = "port"
CONF_USERNAME: Final = "username"
CONF_PASSWORD: Final = "password"
CONF_PRIVATE_KEY_FILE: Final = "private_key_file"
CONF_BACKUP_LOCATION: Final = "backup_location"
# Chunk size used when streaming backup archives over SFTP.
BUF_SIZE = 2**20 * 4  # 4MB
# hass.data key holding the registered backup-agent listeners.
DATA_BACKUP_AGENT_LISTENERS: HassKey[list[Callable[[], None]]] = HassKey(
    f"{DOMAIN}.backup_agent_listeners"
)
# Filename suffix for private keys saved by the config flow.
DEFAULT_PKEY_NAME: str = "sftp_storage_pkey"
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/sftp_storage/const.py",
"license": "Apache License 2.0",
"lines": 19,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/sonos/services.py | """Support to interface with Sonos players."""
from __future__ import annotations
import voluptuous as vol
from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
from homeassistant.const import ATTR_TIME
from homeassistant.core import HomeAssistant, ServiceCall, SupportsResponse, callback
from homeassistant.helpers import config_validation as cv, service
from homeassistant.helpers.entity_platform import DATA_DOMAIN_PLATFORM_ENTITIES
from .const import ATTR_QUEUE_POSITION, DOMAIN
from .media_player import SonosMediaPlayerEntity
from .speaker import SonosSpeaker
# Service names registered under the sonos domain.
SERVICE_SNAPSHOT = "snapshot"
SERVICE_RESTORE = "restore"
SERVICE_SET_TIMER = "set_sleep_timer"
SERVICE_CLEAR_TIMER = "clear_sleep_timer"
SERVICE_UPDATE_ALARM = "update_alarm"
SERVICE_PLAY_QUEUE = "play_queue"
SERVICE_REMOVE_FROM_QUEUE = "remove_from_queue"
SERVICE_GET_QUEUE = "get_queue"
# Attribute names accepted by the service schemas.
ATTR_SLEEP_TIME = "sleep_time"
ATTR_ALARM_ID = "alarm_id"
ATTR_VOLUME = "volume"
ATTR_ENABLED = "enabled"
ATTR_INCLUDE_LINKED_ZONES = "include_linked_zones"
ATTR_WITH_GROUP = "with_group"
@callback
def async_setup_services(hass: HomeAssistant) -> None:
    """Register Sonos services."""

    @service.verify_domain_control(DOMAIN)
    async def async_service_handle(service_call: ServiceCall) -> None:
        """Handle dispatched services."""
        # Resolve the sonos media_player entities targeted by the call.
        platform_entities = hass.data.get(DATA_DOMAIN_PLATFORM_ENTITIES, {}).get(
            (MEDIA_PLAYER_DOMAIN, DOMAIN), {}
        )
        entities = await service.async_extract_entities(
            platform_entities.values(), service_call
        )
        if not entities:
            return
        speakers: list[SonosSpeaker] = []
        for entity in entities:
            assert isinstance(entity, SonosMediaPlayerEntity)
            speakers.append(entity.speaker)
        config_entry = speakers[0].config_entry  # All speakers share the same entry
        if service_call.service == SERVICE_SNAPSHOT:
            await SonosSpeaker.snapshot_multi(
                hass, config_entry, speakers, service_call.data[ATTR_WITH_GROUP]
            )
        elif service_call.service == SERVICE_RESTORE:
            await SonosSpeaker.restore_multi(
                hass, config_entry, speakers, service_call.data[ATTR_WITH_GROUP]
            )

    # snapshot/restore operate on multiple speakers at once, so they are
    # registered as domain services dispatched through the handler above.
    join_unjoin_schema = cv.make_entity_service_schema(
        {vol.Optional(ATTR_WITH_GROUP, default=True): cv.boolean}
    )
    hass.services.async_register(
        DOMAIN, SERVICE_SNAPSHOT, async_service_handle, join_unjoin_schema
    )
    hass.services.async_register(
        DOMAIN, SERVICE_RESTORE, async_service_handle, join_unjoin_schema
    )
    # The remaining services dispatch to methods on the media_player entity.
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_SET_TIMER,
        entity_domain=MEDIA_PLAYER_DOMAIN,
        schema={
            # Sleep time in seconds, capped just below 24 hours.
            vol.Required(ATTR_SLEEP_TIME): vol.All(
                vol.Coerce(int), vol.Range(min=0, max=86399)
            )
        },
        func="set_sleep_timer",
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_CLEAR_TIMER,
        entity_domain=MEDIA_PLAYER_DOMAIN,
        schema=None,
        func="clear_sleep_timer",
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_UPDATE_ALARM,
        entity_domain=MEDIA_PLAYER_DOMAIN,
        schema={
            vol.Required(ATTR_ALARM_ID): cv.positive_int,
            vol.Optional(ATTR_TIME): cv.time,
            vol.Optional(ATTR_VOLUME): cv.small_float,
            vol.Optional(ATTR_ENABLED): cv.boolean,
            vol.Optional(ATTR_INCLUDE_LINKED_ZONES): cv.boolean,
        },
        func="set_alarm",
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_PLAY_QUEUE,
        entity_domain=MEDIA_PLAYER_DOMAIN,
        schema={vol.Optional(ATTR_QUEUE_POSITION): cv.positive_int},
        func="play_queue",
    )
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_REMOVE_FROM_QUEUE,
        entity_domain=MEDIA_PLAYER_DOMAIN,
        schema={vol.Optional(ATTR_QUEUE_POSITION): cv.positive_int},
        func="remove_from_queue",
    )
    # get_queue returns data, so it must be called with return_response.
    service.async_register_platform_entity_service(
        hass,
        DOMAIN,
        SERVICE_GET_QUEUE,
        entity_domain=MEDIA_PLAYER_DOMAIN,
        schema=None,
        func="get_queue",
        supports_response=SupportsResponse.ONLY,
    )
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/sonos/services.py",
"license": "Apache License 2.0",
"lines": 120,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/switchbot_cloud/humidifier.py | """Support for Switchbot humidifier."""
import asyncio
from typing import Any
from switchbot_api import CommonCommands, HumidifierCommands, HumidifierV2Commands
from homeassistant.components.humidifier import (
MODE_AUTO,
MODE_NORMAL,
HumidifierDeviceClass,
HumidifierEntity,
HumidifierEntityFeature,
)
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import STATE_ON
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import SwitchbotCloudData
from .const import AFTER_COMMAND_REFRESH, DOMAIN, HUMIDITY_LEVELS, Humidifier2Mode
from .entity import SwitchBotCloudEntity
PARALLEL_UPDATES = 0
async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Switchbot based on a config entry."""
    data: SwitchbotCloudData = hass.data[DOMAIN][entry.entry_id]

    def _make_entity(device, coordinator):
        # The "Humidifier" device type gets the v1 entity; every other
        # humidifier device is handled by the evaporative (v2) entity.
        if device.device_type == "Humidifier":
            return SwitchBotHumidifier(data.api, device, coordinator)
        return SwitchBotEvaporativeHumidifier(data.api, device, coordinator)

    async_add_entities(
        _make_entity(device, coordinator)
        for device, coordinator in data.devices.humidifiers
    )
class SwitchBotHumidifier(SwitchBotCloudEntity, HumidifierEntity):
    """Representation of a Switchbot humidifier."""

    _attr_supported_features = HumidifierEntityFeature.MODES
    _attr_device_class = HumidifierDeviceClass.HUMIDIFIER
    _attr_available_modes = [MODE_NORMAL, MODE_AUTO]
    _attr_min_humidity = 1
    _attr_translation_key = "humidifier"
    _attr_name = None
    _attr_target_humidity = 50

    def _set_attributes(self) -> None:
        """Set attributes from coordinator data."""
        if coord_data := self.coordinator.data:
            self._attr_is_on = coord_data.get("power") == STATE_ON
            self._attr_mode = MODE_AUTO if coord_data.get("auto") else MODE_NORMAL
            self._attr_current_humidity = coord_data.get("humidity")

    async def async_set_humidity(self, humidity: int) -> None:
        """Set new target humidity.

        The device only supports a few discrete levels, so the requested
        humidity is snapped to the closest supported one.
        """
        # Write the `_attr_` backing field instead of assigning the
        # `target_humidity` entity property, consistent with
        # SwitchBotEvaporativeHumidifier and the other attributes here.
        self._attr_target_humidity, parameters = (
            self._map_humidity_to_supported_level(humidity)
        )
        await self.send_api_command(
            HumidifierCommands.SET_MODE, parameters=str(parameters)
        )
        await asyncio.sleep(AFTER_COMMAND_REFRESH)
        await self.coordinator.async_request_refresh()

    async def async_set_mode(self, mode: str) -> None:
        """Set new target mode."""
        if mode == MODE_AUTO:
            await self.send_api_command(HumidifierCommands.SET_MODE, parameters=mode)
        else:
            # Any non-auto mode is sent as the fixed device parameter "102".
            await self.send_api_command(
                HumidifierCommands.SET_MODE, parameters=str(102)
            )
        await asyncio.sleep(AFTER_COMMAND_REFRESH)
        await self.coordinator.async_request_refresh()

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Turn the device on."""
        await self.send_api_command(CommonCommands.ON)
        await asyncio.sleep(AFTER_COMMAND_REFRESH)
        await self.coordinator.async_request_refresh()

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Turn the device off."""
        await self.send_api_command(CommonCommands.OFF)
        await asyncio.sleep(AFTER_COMMAND_REFRESH)
        await self.coordinator.async_request_refresh()

    def _map_humidity_to_supported_level(self, humidity: int) -> tuple[int, int]:
        """Map any humidity to the closest supported level and its parameter."""
        if humidity <= 34:
            return 34, HUMIDITY_LEVELS[34]
        if humidity <= 67:
            return 67, HUMIDITY_LEVELS[67]
        return 100, HUMIDITY_LEVELS[100]
class SwitchBotEvaporativeHumidifier(SwitchBotCloudEntity, HumidifierEntity):
    """Representation of a Switchbot humidifier v2."""

    _attr_supported_features = HumidifierEntityFeature.MODES
    _attr_device_class = HumidifierDeviceClass.HUMIDIFIER
    _attr_available_modes = Humidifier2Mode.get_modes()
    _attr_translation_key = "evaporative_humidifier"
    _attr_name = None
    _attr_target_humidity = 50

    def _set_attributes(self) -> None:
        """Mirror the latest coordinator snapshot into entity attributes."""
        data = self.coordinator.data
        if not data:
            return
        self._attr_is_on = data.get("power") == STATE_ON
        raw_mode = data.get("mode")
        self._attr_mode = (
            None if raw_mode is None else Humidifier2Mode(raw_mode).name.lower()
        )
        raw_humidity = data.get("humidity")
        # 127 looks like an "unknown humidity" sentinel from the device;
        # report it as None.
        self._attr_current_humidity = None if raw_humidity == 127 else raw_humidity

    async def async_set_humidity(self, humidity: int) -> None:
        """Set new target humidity."""
        assert self.coordinator.data is not None
        self._attr_target_humidity = humidity
        await self.send_api_command(
            HumidifierV2Commands.SET_MODE,
            parameters={"mode": self.coordinator.data["mode"], "humidity": humidity},
        )
        await asyncio.sleep(AFTER_COMMAND_REFRESH)
        await self.coordinator.async_request_refresh()

    async def async_set_mode(self, mode: str) -> None:
        """Set new target mode."""
        assert self.coordinator.data is not None
        await self.send_api_command(
            HumidifierV2Commands.SET_MODE,
            parameters={"mode": Humidifier2Mode[mode.upper()].value},
        )
        await asyncio.sleep(AFTER_COMMAND_REFRESH)
        await self.coordinator.async_request_refresh()

    async def async_turn_on(self, **kwargs: Any) -> None:
        """Switch the humidifier on."""
        await self.send_api_command(CommonCommands.ON)
        await asyncio.sleep(AFTER_COMMAND_REFRESH)
        await self.coordinator.async_request_refresh()

    async def async_turn_off(self, **kwargs: Any) -> None:
        """Switch the humidifier off."""
        await self.send_api_command(CommonCommands.OFF)
        await asyncio.sleep(AFTER_COMMAND_REFRESH)
        await self.coordinator.async_request_refresh()
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/switchbot_cloud/humidifier.py",
"license": "Apache License 2.0",
"lines": 131,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/tasmota/camera.py | """Support for Tasmota Camera."""
from __future__ import annotations
import asyncio
import logging
from typing import Any
import aiohttp
from hatasmota import camera as tasmota_camera
from hatasmota.entity import TasmotaEntity as HATasmotaEntity
from hatasmota.models import DiscoveryHashType
from homeassistant.components import camera
from homeassistant.components.camera import Camera
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import (
async_aiohttp_proxy_web,
async_get_clientsession,
)
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import DATA_REMOVE_DISCOVER_COMPONENT
from .discovery import TASMOTA_DISCOVERY_ENTITY_NEW
from .entity import TasmotaAvailability, TasmotaDiscoveryUpdate, TasmotaEntity
TIMEOUT = 10
_LOGGER = logging.getLogger(__name__)
async def async_setup_entry(
    hass: HomeAssistant,
    entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up Tasmota cameras dynamically through discovery."""

    @callback
    def async_discover(
        tasmota_entity: HATasmotaEntity, discovery_hash: DiscoveryHashType
    ) -> None:
        """Discover and add a Tasmota camera."""
        entity = TasmotaCamera(
            tasmota_entity=tasmota_entity, discovery_hash=discovery_hash
        )
        async_add_entities([entity])

    # Store the unsubscribe callback so the platform can be torn down.
    unsub = async_dispatcher_connect(
        hass,
        TASMOTA_DISCOVERY_ENTITY_NEW.format(camera.DOMAIN),
        async_discover,
    )
    hass.data[DATA_REMOVE_DISCOVER_COMPONENT.format(camera.DOMAIN)] = unsub
class TasmotaCamera(
    TasmotaAvailability,
    TasmotaDiscoveryUpdate,
    TasmotaEntity,
    Camera,
):
    """Representation of a Tasmota Camera."""

    _tasmota_entity: tasmota_camera.TasmotaCamera

    def __init__(self, **kwds: Any) -> None:
        """Initialize."""
        super().__init__(**kwds)
        # Camera does not take part in the cooperative super() chain above,
        # so its initializer must be invoked explicitly.
        Camera.__init__(self)

    async def async_camera_image(
        self, width: int | None = None, height: int | None = None
    ) -> bytes | None:
        """Return a still image response from the camera.

        Raises:
            HomeAssistantError: if the request times out or the HTTP call fails.
        """
        websession = async_get_clientsession(self.hass)
        try:
            async with asyncio.timeout(TIMEOUT):
                response = await self._tasmota_entity.get_still_image_stream(websession)
                return await response.read()
        except TimeoutError as err:
            raise HomeAssistantError(
                f"Timeout getting camera image from {self.name}: {err}"
            ) from err
        except aiohttp.ClientError as err:
            raise HomeAssistantError(
                f"Error getting new camera image from {self.name}: {err}"
            ) from err
        # Fix: removed an unreachable trailing `return None` — every path in
        # the try/except above either returns or raises.

    async def handle_async_mjpeg_stream(
        self, request: aiohttp.web.Request
    ) -> aiohttp.web.StreamResponse | None:
        """Generate an HTTP MJPEG stream from the camera."""
        # connect to stream
        websession = async_get_clientsession(self.hass)
        stream_coro = self._tasmota_entity.get_mjpeg_stream(websession)
        return await async_aiohttp_proxy_web(self.hass, request, stream_coro)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/tasmota/camera.py",
"license": "Apache License 2.0",
"lines": 88,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/togrill/select.py | """Support for select entities."""
from __future__ import annotations
from collections.abc import Callable, Generator, Mapping
from dataclasses import dataclass
from enum import Enum
from typing import Any, TypeVar
from togrill_bluetooth.packets import (
GrillType,
PacketA8Notify,
PacketA303Write,
PacketWrite,
Taste,
)
from homeassistant.components.select import SelectEntity, SelectEntityDescription
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from . import ToGrillConfigEntry
from .const import CONF_PROBE_COUNT, MAX_PROBE_COUNT
from .coordinator import ToGrillCoordinator
from .entity import ToGrillEntity
PARALLEL_UPDATES = 0
OPTION_NONE = "none"
@dataclass(kw_only=True, frozen=True)
class ToGrillSelectEntityDescription(SelectEntityDescription):
    """Description of entity."""

    # Reads the currently selected option from coordinator state.
    get_value: Callable[[ToGrillCoordinator], str | None]
    # Builds the write packet that applies a newly selected option.
    set_packet: Callable[[ToGrillCoordinator, str], PacketWrite]
    # Gate based on config entry data; defaults to always supported.
    entity_supported: Callable[[Mapping[str, Any]], bool] = lambda _: True
    # Probe this entity belongs to, if probe-specific.
    probe_number: int | None = None
_ENUM = TypeVar("_ENUM", bound=Enum)
def _get_enum_from_name(type_: type[_ENUM], value: str) -> _ENUM | None:
    """Translate a select option string into an enum member.

    The sentinel option maps to None; any other value is looked up by its
    upper-cased member name (raising KeyError if unknown).
    """
    return None if value == OPTION_NONE else type_[value.upper()]
def _get_enum_from_value(type_: type[_ENUM], value: int | None) -> _ENUM | None:
    """Convert a raw device value to an enum member, or None if unknown."""
    if value is None:
        return None
    try:
        member = type_(value)
    except ValueError:
        # Value not defined by the enum — treat as unknown.
        return None
    return member
def _get_enum_options(type_: type[_ENUM]) -> list[str]:
    """Build the selectable options: the sentinel plus all member names."""
    return [OPTION_NONE, *(member.name.lower() for member in type_)]
def _get_probe_descriptions(
    probe_number: int,
) -> Generator[ToGrillSelectEntityDescription]:
    """Yield the grill-type and taste select descriptions for one probe.

    The nested closures capture ``probe_number`` so each generated
    description reads and writes the state of its own probe.
    """

    def _get_grill_info(
        coordinator: ToGrillCoordinator,
    ) -> tuple[GrillType | None, Taste | None]:
        # Decode the probe's latest A8 notification into enums; both values
        # are None when no packet has been received yet.
        if not (packet := coordinator.get_packet(PacketA8Notify, probe_number)):
            return None, None
        return _get_enum_from_value(GrillType, packet.grill_type), _get_enum_from_value(
            Taste, packet.taste
        )

    def _set_grill_type(coordinator: ToGrillCoordinator, value: str) -> PacketWrite:
        # Preserve the current taste when only the grill type changes.
        _, taste = _get_grill_info(coordinator)
        grill_type = _get_enum_from_name(GrillType, value)
        return PacketA303Write(probe=probe_number, grill_type=grill_type, taste=taste)

    def _set_taste(coordinator: ToGrillCoordinator, value: str) -> PacketWrite:
        # Preserve the current grill type when only the taste changes.
        grill_type, _ = _get_grill_info(coordinator)
        taste = _get_enum_from_name(Taste, value)
        return PacketA303Write(probe=probe_number, grill_type=grill_type, taste=taste)

    def _get_grill_type(coordinator: ToGrillCoordinator) -> str | None:
        grill_type, _ = _get_grill_info(coordinator)
        if grill_type is None:
            return OPTION_NONE
        return grill_type.name.lower()

    def _get_taste(coordinator: ToGrillCoordinator) -> str | None:
        _, taste = _get_grill_info(coordinator)
        if taste is None:
            return OPTION_NONE
        return taste.name.lower()

    yield ToGrillSelectEntityDescription(
        key=f"grill_type_{probe_number}",
        translation_key="grill_type",
        options=_get_enum_options(GrillType),
        set_packet=_set_grill_type,
        get_value=_get_grill_type,
        # Only offered when the configured probe count covers this probe.
        entity_supported=lambda x: probe_number <= x[CONF_PROBE_COUNT],
        probe_number=probe_number,
    )
    yield ToGrillSelectEntityDescription(
        key=f"taste_{probe_number}",
        translation_key="taste",
        options=_get_enum_options(Taste),
        set_packet=_set_taste,
        get_value=_get_taste,
        entity_supported=lambda x: probe_number <= x[CONF_PROBE_COUNT],
        probe_number=probe_number,
    )
ENTITY_DESCRIPTIONS = (
*[
description
for probe_number in range(1, MAX_PROBE_COUNT + 1)
for description in _get_probe_descriptions(probe_number)
],
)
async def async_setup_entry(
    hass: HomeAssistant,
    entry: ToGrillConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up select based on a config entry."""
    coordinator = entry.runtime_data
    # Keep only the descriptions applicable to this entry's configuration.
    supported = [
        description
        for description in ENTITY_DESCRIPTIONS
        if description.entity_supported(entry.data)
    ]
    async_add_entities(
        ToGrillSelect(coordinator, description) for description in supported
    )
class ToGrillSelect(ToGrillEntity, SelectEntity):
    """Representation of a select entity."""

    entity_description: ToGrillSelectEntityDescription

    def __init__(
        self,
        coordinator: ToGrillCoordinator,
        entity_description: ToGrillSelectEntityDescription,
    ) -> None:
        """Initialize the select entity from its description."""
        description = entity_description
        super().__init__(coordinator, probe_number=description.probe_number)
        self.entity_description = description
        self._attr_unique_id = f"{coordinator.address}_{description.key}"

    @property
    def current_option(self) -> str | None:
        """Return the selected entity option to represent the entity state."""
        return self.entity_description.get_value(self.coordinator)

    async def async_select_option(self, option: str) -> None:
        """Set value on device."""
        write_packet = self.entity_description.set_packet(self.coordinator, option)
        await self._write_packet(write_packet)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/togrill/select.py",
"license": "Apache License 2.0",
"lines": 134,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/usage_prediction/common_control.py | """Code to generate common control usage patterns."""
from __future__ import annotations
from collections import Counter
from collections.abc import Callable, Sequence
from datetime import datetime, timedelta
from functools import cache
import logging
from typing import Any, Literal, cast
from sqlalchemy import select
from sqlalchemy.engine.row import Row
from sqlalchemy.orm import Session
from homeassistant.components.recorder import get_instance
from homeassistant.components.recorder.db_schema import EventData, Events, EventTypes
from homeassistant.components.recorder.models import uuid_hex_to_bytes_or_none
from homeassistant.components.recorder.util import session_scope
from homeassistant.const import Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.util import dt as dt_util
from homeassistant.util.json import json_loads_object
from .models import EntityUsagePredictions
_LOGGER = logging.getLogger(__name__)
# Time categories for usage patterns
TIME_CATEGORIES = ["morning", "afternoon", "evening", "night"]
RESULTS_TO_INCLUDE = 8
# List of domains for which we want to track usage
ALLOWED_DOMAINS = {
# Entity platforms
Platform.AIR_QUALITY,
Platform.ALARM_CONTROL_PANEL,
Platform.BINARY_SENSOR,
Platform.BUTTON,
Platform.CAMERA,
Platform.CLIMATE,
Platform.COVER,
Platform.FAN,
Platform.HUMIDIFIER,
Platform.LAWN_MOWER,
Platform.LIGHT,
Platform.LOCK,
Platform.MEDIA_PLAYER,
Platform.NUMBER,
Platform.SCENE,
Platform.SELECT,
Platform.SENSOR,
Platform.SIREN,
Platform.SWITCH,
Platform.VACUUM,
Platform.VALVE,
Platform.WATER_HEATER,
# Helpers with own domain
"counter",
"group",
"input_boolean",
"input_button",
"input_datetime",
"input_number",
"input_select",
"input_text",
"schedule",
"timer",
}
@cache
def time_category(hour: int) -> Literal["morning", "afternoon", "evening", "night"]:
    """Bucket an hour of the day (0-23) into one of four named periods."""
    # Night wraps around midnight: 22:00-05:59.
    if hour < 6 or hour >= 22:
        return "night"
    if hour < 12:
        return "morning"
    if hour < 18:
        return "afternoon"
    return "evening"
async def async_predict_common_control(
    hass: HomeAssistant, user_id: str
) -> EntityUsagePredictions:
    """Generate a list of commonly used entities for a user.

    Counts, per time-of-day category, how often the user targeted each
    allowed entity via service calls over the last 30 days, and returns
    the most common entities for each category.

    Args:
        hass: Home Assistant instance
        user_id: User ID to filter events by.
    """
    # Get the recorder instance to ensure it's ready
    recorder = get_instance(hass)
    ent_reg = er.async_get(hass)

    # Execute the database operation in the recorder's executor
    data = await recorder.async_add_executor_job(
        _fetch_with_session, hass, _fetch_and_process_data, ent_reg, user_id
    )

    # Prepare a dictionary to track results
    results: dict[str, Counter[str]] = {
        time_cat: Counter() for time_cat in TIME_CATEGORIES
    }

    # Only entities currently present in the state machine are considered.
    allowed_entities = set(hass.states.async_entity_ids(ALLOWED_DOMAINS))
    # Cache of entities already determined to be hidden in the registry.
    hidden_entities: set[str] = set()

    # Keep track of contexts that we processed so that we will only process
    # the first service call in a context, and not subsequent calls.
    context_processed: set[bytes] = set()

    # Execute the query
    context_id: bytes
    time_fired_ts: float
    shared_data: str | None
    local_time_zone = dt_util.get_default_time_zone()
    for context_id, time_fired_ts, shared_data in data:
        # Skip if we have already processed an event that was part of this context
        if context_id in context_processed:
            continue

        # Mark this context as processed
        context_processed.add(context_id)

        # Parse the event data
        if not time_fired_ts or not shared_data:
            continue

        try:
            event_data = json_loads_object(shared_data)
        except (ValueError, TypeError) as err:
            _LOGGER.debug("Failed to parse event data: %s", err)
            continue

        # Empty event data, skipping
        if not event_data:
            continue

        service_data = cast(dict[str, Any] | None, event_data.get("service_data"))

        # No service data found, skipping
        if not service_data:
            continue

        entity_ids: str | list[str] | None = service_data.get("entity_id")

        # No entity IDs found, skip this event
        if entity_ids is None:
            continue

        # Normalize a single entity id to a one-element list.
        if not isinstance(entity_ids, list):
            entity_ids = [entity_ids]

        # Convert to local time for time category determination
        period = time_category(
            datetime.fromtimestamp(time_fired_ts, local_time_zone).hour
        )
        period_results = results[period]

        # Count entity usage
        for entity_id in entity_ids:
            if entity_id not in allowed_entities or entity_id in hidden_entities:
                continue

            # Only consult the registry for entities not already counted in
            # this period; remember hidden ones to skip them from then on.
            if (
                entity_id not in period_results
                and (entry := ent_reg.async_get(entity_id))
                and entry.hidden
            ):
                hidden_entities.add(entity_id)
                continue

            period_results[entity_id] += 1

    return EntityUsagePredictions(
        morning=[
            ent_id for (ent_id, _) in results["morning"].most_common(RESULTS_TO_INCLUDE)
        ],
        afternoon=[
            ent_id
            for (ent_id, _) in results["afternoon"].most_common(RESULTS_TO_INCLUDE)
        ],
        evening=[
            ent_id for (ent_id, _) in results["evening"].most_common(RESULTS_TO_INCLUDE)
        ],
        night=[
            ent_id for (ent_id, _) in results["night"].most_common(RESULTS_TO_INCLUDE)
        ],
    )
def _fetch_and_process_data(
    session: Session, ent_reg: er.EntityRegistry, user_id: str
) -> Sequence[Row[tuple[bytes | None, float | None, str | None]]]:
    """Fetch and process service call events from the database.

    Returns rows of (context_id_bin, time_fired_ts, shared_data) for all
    ``call_service`` events fired by *user_id* in the last 30 days, oldest
    first.

    NOTE(review): ``ent_reg`` is unused here; it appears to be kept so the
    argument pass-through in ``_fetch_with_session`` stays uniform — confirm.

    Raises:
        ValueError: if *user_id* is not a valid UUID hex string.
    """
    thirty_days_ago_ts = (dt_util.utcnow() - timedelta(days=30)).timestamp()
    user_id_bytes = uuid_hex_to_bytes_or_none(user_id)
    if not user_id_bytes:
        raise ValueError("Invalid user_id format")

    # Build the main query for events with their data
    query = (
        select(
            Events.context_id_bin,
            Events.time_fired_ts,
            EventData.shared_data,
        )
        .select_from(Events)
        .outerjoin(EventData, Events.data_id == EventData.data_id)
        .outerjoin(EventTypes, Events.event_type_id == EventTypes.event_type_id)
        .where(Events.time_fired_ts >= thirty_days_ago_ts)
        .where(Events.context_user_id_bin == user_id_bytes)
        .where(EventTypes.event_type == "call_service")
        .order_by(Events.time_fired_ts)
    )
    return session.connection().execute(query).all()
def _fetch_with_session(
    hass: HomeAssistant,
    fetch_func: Callable[
        [Session], Sequence[Row[tuple[bytes | None, float | None, str | None]]]
    ],
    *args: object,
) -> Sequence[Row[tuple[bytes | None, float | None, str | None]]]:
    """Execute a fetch function with a database session.

    Opens a read-only recorder session and calls
    ``fetch_func(session, *args)`` inside it, returning its result.
    """
    with session_scope(hass=hass, read_only=True) as session:
        return fetch_func(session, *args)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/usage_prediction/common_control.py",
"license": "Apache License 2.0",
"lines": 194,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/usage_prediction/const.py | """Constants for the usage prediction integration."""
import asyncio
from homeassistant.util.hass_dict import HassKey
from .models import EntityUsageDataCache, EntityUsagePredictions
DOMAIN = "usage_prediction"
DATA_CACHE: HassKey[
dict[str, asyncio.Task[EntityUsagePredictions] | EntityUsageDataCache]
] = HassKey("usage_prediction")
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/usage_prediction/const.py",
"license": "Apache License 2.0",
"lines": 8,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/usage_prediction/models.py | """Models for the usage prediction integration."""
from dataclasses import dataclass, field
from datetime import datetime
from homeassistant.util import dt as dt_util
@dataclass
class EntityUsagePredictions:
    """Prediction which entities are likely to be used in each time category."""

    # Entity IDs predicted for each part of the day.
    morning: list[str] = field(default_factory=list)
    afternoon: list[str] = field(default_factory=list)
    evening: list[str] = field(default_factory=list)
    night: list[str] = field(default_factory=list)
@dataclass
class EntityUsageDataCache:
    """Data model for entity usage prediction."""

    predictions: EntityUsagePredictions
    # UTC time the predictions were created — presumably used by callers
    # for cache expiry; verify against the consuming code.
    timestamp: datetime = field(default_factory=dt_util.utcnow)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/usage_prediction/models.py",
"license": "Apache License 2.0",
"lines": 16,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/victron_remote_monitoring/config_flow.py | """Config flow for the Victron VRM Solar Forecast integration."""
from __future__ import annotations
from collections.abc import Mapping
import logging
from typing import Any
from victron_vrm import VictronVRMClient
from victron_vrm.exceptions import AuthenticationError, VictronVRMError
from victron_vrm.models import Site
import voluptuous as vol
from homeassistant.config_entries import ConfigFlow, ConfigFlowResult
from homeassistant.exceptions import HomeAssistantError
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.selector import (
SelectOptionDict,
SelectSelector,
SelectSelectorConfig,
SelectSelectorMode,
)
from .const import CONF_API_TOKEN, CONF_SITE_ID, DOMAIN
_LOGGER = logging.getLogger(__name__)
STEP_USER_DATA_SCHEMA = vol.Schema({vol.Required(CONF_API_TOKEN): str})
class CannotConnect(HomeAssistantError):
    """Error to indicate we cannot connect to the VRM API."""
class InvalidAuth(HomeAssistantError):
    """Error to indicate there is invalid auth (bad or unauthorized token)."""
class SiteNotFound(HomeAssistantError):
    """Error to indicate the requested VRM site was not found."""
class VictronRemoteMonitoringFlowHandler(ConfigFlow, domain=DOMAIN):
    """Handle a config flow for Victron Remote Monitoring.

    Supports reauthentication when the stored token becomes invalid.
    """

    VERSION = 1

    def __init__(self) -> None:
        """Initialize flow state."""
        # Token validated in the user step; reused by the site-select step.
        self._api_token: str | None = None
        # Sites fetched for the validated token, shown in the selector.
        self._sites: list[Site] = []

    def _build_site_options(self) -> list[SelectOptionDict]:
        """Build selector options for the available sites."""
        return [
            SelectOptionDict(
                value=str(site.id), label=f"{(site.name or 'Site')} (ID:{site.id})"
            )
            for site in self._sites
        ]

    async def _async_validate_token_and_fetch_sites(self, api_token: str) -> list[Site]:
        """Validate the API token and return available sites.

        Raises InvalidAuth on bad/unauthorized token; CannotConnect on other errors.
        """
        client = VictronVRMClient(
            token=api_token,
            client_session=async_get_clientsession(self.hass),
        )
        try:
            sites = await client.users.list_sites()
        except AuthenticationError as err:
            raise InvalidAuth("Invalid authentication or permission") from err
        except VictronVRMError as err:
            # Some auth failures surface as generic API errors with 401/403.
            if getattr(err, "status_code", None) in (401, 403):
                raise InvalidAuth("Invalid authentication or permission") from err
            raise CannotConnect(f"Cannot connect to VRM API: {err}") from err
        else:
            return sites

    async def _async_validate_selected_site(self, api_token: str, site_id: int) -> Site:
        """Validate access to the selected site and return its data."""
        client = VictronVRMClient(
            token=api_token,
            client_session=async_get_clientsession(self.hass),
        )
        try:
            site_data = await client.users.get_site(site_id)
        except AuthenticationError as err:
            raise InvalidAuth("Invalid authentication or permission") from err
        except VictronVRMError as err:
            # Same 401/403 mapping as in the token validation above.
            if getattr(err, "status_code", None) in (401, 403):
                raise InvalidAuth("Invalid authentication or permission") from err
            raise CannotConnect(f"Cannot connect to VRM API: {err}") from err
        if site_data is None:
            raise SiteNotFound(f"Site with ID {site_id} not found")
        return site_data

    async def async_step_user(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """First step: ask for API token and validate it."""
        errors: dict[str, str] = {}
        if user_input is not None:
            api_token: str = user_input[CONF_API_TOKEN]
            try:
                sites = await self._async_validate_token_and_fetch_sites(api_token)
            except CannotConnect:
                errors["base"] = "cannot_connect"
            except InvalidAuth:
                errors["base"] = "invalid_auth"
            except Exception:
                _LOGGER.exception("Unexpected exception")
                errors["base"] = "unknown"
            else:
                if not sites:
                    # Token works but there is nothing to set up.
                    return self.async_show_form(
                        step_id="user",
                        data_schema=STEP_USER_DATA_SCHEMA,
                        errors={"base": "no_sites"},
                    )
                self._api_token = api_token
                # Sort sites by name then id for stable order
                self._sites = sorted(sites, key=lambda s: (s.name or "", s.id))
                if len(self._sites) == 1:
                    # Only one site available, skip site selection step
                    site = self._sites[0]
                    await self.async_set_unique_id(
                        str(site.id), raise_on_progress=False
                    )
                    self._abort_if_unique_id_configured()
                    return self.async_create_entry(
                        title=f"VRM for {site.name}",
                        data={CONF_API_TOKEN: self._api_token, CONF_SITE_ID: site.id},
                    )
                return await self.async_step_select_site()
        return self.async_show_form(
            step_id="user", data_schema=STEP_USER_DATA_SCHEMA, errors=errors
        )

    async def async_step_select_site(
        self, user_input: dict[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Second step: present sites and validate selection."""
        assert self._api_token is not None
        if user_input is None:
            site_options = self._build_site_options()
            return self.async_show_form(
                step_id="select_site",
                data_schema=vol.Schema(
                    {
                        vol.Required(CONF_SITE_ID): SelectSelector(
                            SelectSelectorConfig(
                                options=site_options, mode=SelectSelectorMode.DROPDOWN
                            )
                        )
                    }
                ),
            )
        # User submitted a site selection
        site_id = int(user_input[CONF_SITE_ID])
        # Prevent duplicate entries for the same site
        self._async_abort_entries_match({CONF_SITE_ID: site_id})
        errors: dict[str, str] = {}
        try:
            site = await self._async_validate_selected_site(self._api_token, site_id)
        except CannotConnect:
            errors["base"] = "cannot_connect"
        except InvalidAuth:
            errors["base"] = "invalid_auth"
        except SiteNotFound:
            errors["base"] = "site_not_found"
        except Exception:  # pragma: no cover - unexpected
            _LOGGER.exception("Unexpected exception")
            errors["base"] = "unknown"
        else:
            # Ensure unique ID per site to avoid duplicates across reloads
            await self.async_set_unique_id(str(site_id), raise_on_progress=False)
            self._abort_if_unique_id_configured()
            return self.async_create_entry(
                title=f"VRM for {site.name}",
                data={CONF_API_TOKEN: self._api_token, CONF_SITE_ID: site_id},
            )
        # If we reach here, show the selection form again with errors
        site_options = self._build_site_options()
        return self.async_show_form(
            step_id="select_site",
            data_schema=vol.Schema(
                {
                    vol.Required(CONF_SITE_ID): SelectSelector(
                        SelectSelectorConfig(
                            options=site_options, mode=SelectSelectorMode.DROPDOWN
                        )
                    )
                }
            ),
            errors=errors,
        )

    async def async_step_reauth(
        self, entry_data: Mapping[str, Any]
    ) -> ConfigFlowResult:
        """Start reauthentication by asking for a (new) API token.

        We only need the token again; the site is fixed per entry and set as unique id.
        """
        # Reset transient flow state before asking for the new token.
        self._api_token = None
        self._sites = []
        return await self.async_step_reauth_confirm()

    async def async_step_reauth_confirm(
        self, user_input: Mapping[str, Any] | None = None
    ) -> ConfigFlowResult:
        """Handle reauthentication confirmation with new token."""
        errors: dict[str, str] = {}
        reauth_entry = self._get_reauth_entry()
        if user_input is not None:
            new_token = user_input[CONF_API_TOKEN]
            site_id: int = reauth_entry.data[CONF_SITE_ID]
            try:
                # Validate the token by fetching the site for the existing entry
                await self._async_validate_selected_site(new_token, site_id)
            except InvalidAuth:
                errors["base"] = "invalid_auth"
            except CannotConnect:
                errors["base"] = "cannot_connect"
            except SiteNotFound:
                # Site removed or no longer visible to the account; treat as cannot connect
                errors["base"] = "site_not_found"
            except Exception:  # pragma: no cover - unexpected
                _LOGGER.exception("Unexpected exception during reauth")
                errors["base"] = "unknown"
            else:
                # Update stored token and reload entry
                return self.async_update_reload_and_abort(
                    reauth_entry,
                    data_updates={CONF_API_TOKEN: new_token},
                    reason="reauth_successful",
                )
        return self.async_show_form(
            step_id="reauth_confirm",
            data_schema=vol.Schema({vol.Required(CONF_API_TOKEN): str}),
            errors=errors,
        )
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/victron_remote_monitoring/config_flow.py",
"license": "Apache License 2.0",
"lines": 221,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/victron_remote_monitoring/const.py | """Constants for the Victron VRM Solar Forecast integration."""
import logging
DOMAIN = "victron_remote_monitoring"
LOGGER = logging.getLogger(__package__)
CONF_SITE_ID = "site_id"
CONF_API_TOKEN = "api_token"
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/victron_remote_monitoring/const.py",
"license": "Apache License 2.0",
"lines": 6,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/victron_remote_monitoring/coordinator.py | """VRM Coordinator and Client."""
from dataclasses import dataclass
import datetime
from victron_vrm import VictronVRMClient
from victron_vrm.exceptions import AuthenticationError, VictronVRMError
from victron_vrm.models.aggregations import ForecastAggregations
from victron_vrm.utils import dt_now
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryAuthFailed
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import CONF_API_TOKEN, CONF_SITE_ID, DOMAIN, LOGGER
type VictronRemoteMonitoringConfigEntry = ConfigEntry[
VictronRemoteMonitoringDataUpdateCoordinator
]
@dataclass
class VRMForecastStore:
    """Class to hold the forecast data."""

    # VRM installation (site) id the forecasts belong to.
    site_id: int
    # Solar yield forecast aggregations; may be None.
    solar: ForecastAggregations | None
    # Consumption forecast aggregations; may be None.
    consumption: ForecastAggregations | None
async def get_forecast(client: VictronVRMClient, site_id: int) -> VRMForecastStore:
    """Fetch solar-yield and consumption forecast stats for a site.

    Queries hourly forecast statistics from the start of yesterday through
    the end of the 6th day from now and returns the aggregated results.

    Args:
        client: Authenticated VRM API client.
        site_id: VRM installation (site) id.
    """
    # Fix: anchor both ends of the window on a single "now" so start/end
    # cannot disagree if this coroutine happens to run across midnight
    # (the original called dt_now() once per bound).
    midnight = dt_now().replace(hour=0, minute=0, second=0, microsecond=0)
    start = int((midnight - datetime.timedelta(days=1)).timestamp())
    # Get timestamp of the end of 6th day from now
    end = int((midnight + datetime.timedelta(days=6)).timestamp())
    stats = await client.installations.stats(
        site_id,
        start=start,
        end=end,
        interval="hours",
        type="forecast",
        return_aggregations=True,
    )
    return VRMForecastStore(
        solar=stats["solar_yield"],
        consumption=stats["consumption"],
        site_id=site_id,
    )
class VictronRemoteMonitoringDataUpdateCoordinator(
    DataUpdateCoordinator[VRMForecastStore]
):
    """Class to manage fetching VRM Forecast data."""

    config_entry: VictronRemoteMonitoringConfigEntry

    def __init__(
        self,
        hass: HomeAssistant,
        config_entry: VictronRemoteMonitoringConfigEntry,
    ) -> None:
        """Initialize."""
        # API client authenticated with the token stored on the entry.
        self.client = VictronVRMClient(
            token=config_entry.data[CONF_API_TOKEN],
            client_session=async_get_clientsession(hass),
        )
        self.site_id = config_entry.data[CONF_SITE_ID]
        super().__init__(
            hass,
            LOGGER,
            config_entry=config_entry,
            name=DOMAIN,
            # Refresh forecasts once per hour.
            update_interval=datetime.timedelta(minutes=60),
        )

    async def _async_update_data(self) -> VRMForecastStore:
        """Fetch data from VRM API.

        Raises:
            ConfigEntryAuthFailed: when the stored token is rejected, which
                triggers Home Assistant's reauth flow.
            UpdateFailed: for any other VRM API error.
        """
        try:
            return await get_forecast(self.client, self.site_id)
        except AuthenticationError as err:
            raise ConfigEntryAuthFailed(
                f"Invalid authentication for VRM API: {err}"
            ) from err
        except VictronVRMError as err:
            raise UpdateFailed(f"Cannot connect to VRM API: {err}") from err
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/victron_remote_monitoring/coordinator.py",
"license": "Apache License 2.0",
"lines": 83,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/victron_remote_monitoring/sensor.py | """Support for the VRM Solar Forecast sensor service."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from datetime import datetime
from homeassistant.components.sensor import (
SensorDeviceClass,
SensorEntity,
SensorEntityDescription,
SensorStateClass,
)
from homeassistant.const import EntityCategory, UnitOfEnergy
from homeassistant.core import HomeAssistant
from homeassistant.helpers.device_registry import DeviceEntryType, DeviceInfo
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.helpers.typing import StateType
from homeassistant.helpers.update_coordinator import CoordinatorEntity
from .const import DOMAIN
from .coordinator import (
VictronRemoteMonitoringConfigEntry,
VictronRemoteMonitoringDataUpdateCoordinator,
VRMForecastStore,
)
@dataclass(frozen=True, kw_only=True)
class VRMForecastsSensorEntityDescription(SensorEntityDescription):
    """Describes a VRM Forecast Sensor."""

    # Extracts this sensor's value from the coordinator's forecast store;
    # implementations return None when the relevant forecast is unavailable.
    value_fn: Callable[[VRMForecastStore], int | float | datetime | None]
def _solar_value(
    attr: str,
) -> Callable[[VRMForecastStore], int | float | datetime | None]:
    """Build a value_fn reading *attr* from the solar forecast (None if absent)."""

    def _value(store: VRMForecastStore) -> int | float | datetime | None:
        return getattr(store.solar, attr) if store.solar is not None else None

    return _value


def _consumption_value(
    attr: str,
) -> Callable[[VRMForecastStore], int | float | datetime | None]:
    """Build a value_fn reading *attr* from the consumption forecast (None if absent)."""

    def _value(store: VRMForecastStore) -> int | float | datetime | None:
        return (
            getattr(store.consumption, attr) if store.consumption is not None else None
        )

    return _value


def _energy_description(
    key: str,
    value_fn: Callable[[VRMForecastStore], int | float | datetime | None],
) -> VRMForecastsSensorEntityDescription:
    """Describe an energy estimate sensor (native Wh, displayed as kWh)."""
    return VRMForecastsSensorEntityDescription(
        key=key,
        translation_key=key,
        value_fn=value_fn,
        device_class=SensorDeviceClass.ENERGY,
        state_class=SensorStateClass.TOTAL,
        native_unit_of_measurement=UnitOfEnergy.WATT_HOUR,
        suggested_unit_of_measurement=UnitOfEnergy.KILO_WATT_HOUR,
        suggested_display_precision=1,
    )


def _peak_time_description(
    key: str,
    value_fn: Callable[[VRMForecastStore], int | float | datetime | None],
) -> VRMForecastsSensorEntityDescription:
    """Describe a highest-peak timestamp sensor."""
    return VRMForecastsSensorEntityDescription(
        key=key,
        translation_key=key,
        value_fn=value_fn,
        device_class=SensorDeviceClass.TIMESTAMP,
    )


# Every key doubles as its translation key, so the descriptions are generated
# by the factories above instead of being written out by hand.
SENSORS: tuple[VRMForecastsSensorEntityDescription, ...] = (
    # Solar forecast sensors
    _energy_description(
        "energy_production_estimate_yesterday", _solar_value("yesterday_total")
    ),
    _energy_description(
        "energy_production_estimate_today", _solar_value("today_total")
    ),
    _energy_description(
        "energy_production_estimate_today_remaining", _solar_value("today_left_total")
    ),
    _energy_description(
        "energy_production_estimate_tomorrow", _solar_value("tomorrow_total")
    ),
    _peak_time_description(
        "power_highest_peak_time_yesterday", _solar_value("yesterday_peak_time")
    ),
    _peak_time_description(
        "power_highest_peak_time_today", _solar_value("today_peak_time")
    ),
    _peak_time_description(
        "power_highest_peak_time_tomorrow", _solar_value("tomorrow_peak_time")
    ),
    _energy_description(
        "energy_production_current_hour", _solar_value("current_hour_total")
    ),
    _energy_description(
        "energy_production_next_hour", _solar_value("next_hour_total")
    ),
    # Consumption forecast sensors
    _energy_description(
        "energy_consumption_estimate_yesterday", _consumption_value("yesterday_total")
    ),
    _energy_description(
        "energy_consumption_estimate_today", _consumption_value("today_total")
    ),
    _energy_description(
        "energy_consumption_estimate_today_remaining",
        _consumption_value("today_left_total"),
    ),
    _energy_description(
        "energy_consumption_estimate_tomorrow", _consumption_value("tomorrow_total")
    ),
    _peak_time_description(
        "consumption_highest_peak_time_yesterday",
        _consumption_value("yesterday_peak_time"),
    ),
    _peak_time_description(
        "consumption_highest_peak_time_today", _consumption_value("today_peak_time")
    ),
    _peak_time_description(
        "consumption_highest_peak_time_tomorrow",
        _consumption_value("tomorrow_peak_time"),
    ),
    _energy_description(
        "energy_consumption_current_hour", _consumption_value("current_hour_total")
    ),
    _energy_description(
        "energy_consumption_next_hour", _consumption_value("next_hour_total")
    ),
)
async def async_setup_entry(
    hass: HomeAssistant,
    entry: VictronRemoteMonitoringConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Create one forecast sensor entity per description."""
    coordinator = entry.runtime_data
    entities = [
        VRMForecastsSensorEntity(
            entry_id=entry.entry_id,
            coordinator=coordinator,
            description=description,
        )
        for description in SENSORS
    ]
    async_add_entities(entities)
class VRMForecastsSensorEntity(
    CoordinatorEntity[VictronRemoteMonitoringDataUpdateCoordinator], SensorEntity
):
    """Sensor exposing a single value from the VRM forecast store."""

    entity_description: VRMForecastsSensorEntityDescription
    _attr_has_entity_name = True
    _attr_entity_category = EntityCategory.DIAGNOSTIC

    def __init__(
        self,
        *,
        entry_id: str,
        coordinator: VictronRemoteMonitoringDataUpdateCoordinator,
        description: VRMForecastsSensorEntityDescription,
    ) -> None:
        """Initialize VRM Solar Forecast sensor."""
        super().__init__(coordinator=coordinator)
        self.entity_description = description
        site_id = coordinator.data.site_id
        self._attr_unique_id = f"{site_id}|{description.key}"
        # All sensors of one site share a single service device.
        self._attr_device_info = DeviceInfo(
            entry_type=DeviceEntryType.SERVICE,
            identifiers={(DOMAIN, str(site_id))},
            manufacturer="Victron Energy",
            model=f"VRM - {site_id}",
            name="Victron Remote Monitoring",
            configuration_url="https://vrm.victronenergy.com",
        )

    @property
    def native_value(self) -> datetime | StateType:
        """Return the value extracted by the description's value_fn."""
        store = self.coordinator.data
        return self.entity_description.value_fn(store)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/victron_remote_monitoring/sensor.py",
"license": "Apache License 2.0",
"lines": 276,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/components/weatherflow/event.py | """Event entities for the WeatherFlow integration."""
from __future__ import annotations
from dataclasses import dataclass
from pyweatherflowudp.device import EVENT_RAIN_START, EVENT_STRIKE, WeatherFlowDevice
from homeassistant.components.event import EventEntity, EventEntityDescription
from homeassistant.config_entries import ConfigEntry
from homeassistant.core import HomeAssistant, callback
from homeassistant.helpers.device_registry import DeviceInfo
from homeassistant.helpers.dispatcher import async_dispatcher_connect
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from .const import DOMAIN, LOGGER, format_dispatch_call
@dataclass(frozen=True, kw_only=True)
class WeatherFlowEventEntityDescription(EventEntityDescription):
    """Describes a WeatherFlow event entity."""

    # Name of the pyweatherflowudp device event to subscribe to.
    wf_event: str
    # Event types surfaced to Home Assistant; the first entry is the one
    # fired by the entity's event handler.
    event_types: list[str]
# One event entity per supported WeatherFlow UDP device event.
EVENT_DESCRIPTIONS: list[WeatherFlowEventEntityDescription] = [
    WeatherFlowEventEntityDescription(
        key="precip_start_event",
        translation_key="precip_start_event",
        event_types=["precipitation_start"],
        wf_event=EVENT_RAIN_START,
    ),
    WeatherFlowEventEntityDescription(
        key="lightning_strike_event",
        translation_key="lightning_strike_event",
        event_types=["lightning_strike"],
        wf_event=EVENT_STRIKE,
    ),
]
async def async_setup_entry(
    hass: HomeAssistant,
    config_entry: ConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up WeatherFlow event entities using config entry."""

    @callback
    def _async_device_discovered(device: WeatherFlowDevice) -> None:
        """Create the event entities for a newly discovered device."""
        LOGGER.debug("Adding events for %s", device)
        entities = [
            WeatherFlowEventEntity(device, description)
            for description in EVENT_DESCRIPTIONS
        ]
        async_add_entities(entities)

    # Devices are discovered asynchronously via the dispatcher; disconnect
    # the listener when the entry unloads.
    unsubscribe = async_dispatcher_connect(
        hass,
        format_dispatch_call(config_entry),
        _async_device_discovered,
    )
    config_entry.async_on_unload(unsubscribe)
class WeatherFlowEventEntity(EventEntity):
    """Generic WeatherFlow event entity."""

    _attr_has_entity_name = True
    entity_description: WeatherFlowEventEntityDescription

    def __init__(
        self,
        device: WeatherFlowDevice,
        description: WeatherFlowEventEntityDescription,
    ) -> None:
        """Initialize the WeatherFlow event entity."""
        self.device = device
        self.entity_description = description
        serial = device.serial_number
        self._attr_unique_id = f"{serial}_{description.key}"
        self._attr_device_info = DeviceInfo(
            identifiers={(DOMAIN, serial)},
            manufacturer="WeatherFlow",
            model=device.model,
            name=serial,
            sw_version=device.firmware_revision,
        )

    async def async_added_to_hass(self) -> None:
        """Subscribe to the configured WeatherFlow device event."""
        unsubscribe = self.device.on(
            self.entity_description.wf_event, self._handle_event
        )
        self.async_on_remove(unsubscribe)

    @callback
    def _handle_event(self, event) -> None:
        """Fire the entity's single event type and push the new state."""
        self._trigger_event(self.entity_description.event_types[0], {})
        self.async_write_ha_state()
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/weatherflow/event.py",
"license": "Apache License 2.0",
"lines": 82,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/components/workday/calendar.py | """Workday Calendar."""
from __future__ import annotations
from datetime import date, datetime, timedelta
from holidays import HolidayBase
from homeassistant.components.calendar import CalendarEntity, CalendarEvent
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant
from homeassistant.helpers.entity_platform import AddConfigEntryEntitiesCallback
from homeassistant.util import dt as dt_util
from . import WorkdayConfigEntry
from .const import CONF_EXCLUDES, CONF_OFFSET, CONF_WORKDAYS
from .entity import BaseWorkdayEntity
async def async_setup_entry(
    hass: HomeAssistant,
    entry: WorkdayConfigEntry,
    async_add_entities: AddConfigEntryEntitiesCallback,
) -> None:
    """Set up the Holiday Calendar config entry."""
    options = entry.options
    calendar = WorkdayCalendarEntity(
        entry.runtime_data,
        options[CONF_WORKDAYS],
        options[CONF_EXCLUDES],
        int(options[CONF_OFFSET]),
        options[CONF_NAME],
        entry.entry_id,
    )
    async_add_entities([calendar])
class WorkdayCalendarEntity(BaseWorkdayEntity, CalendarEntity):
    """Representation of a Workday Calendar."""

    def __init__(
        self,
        obj_holidays: HolidayBase,
        workdays: list[str],
        excludes: list[str],
        days_offset: int,
        name: str,
        entry_id: str,
    ) -> None:
        """Initialize WorkdayCalendarEntity.

        Arguments mirror BaseWorkdayEntity: the holiday calendar, the
        configured workdays/excludes, the day offset, the entity name and
        the config entry id (also used as the unique id).
        """
        super().__init__(
            obj_holidays,
            workdays,
            excludes,
            days_offset,
            name,
            entry_id,
        )
        self._attr_unique_id = entry_id
        self._attr_event = None
        # All workday events for the current and next year; rebuilt by
        # update_data() and not guaranteed to be sorted.
        self.event_list: list[CalendarEvent] = []
        self._name = name

    def update_data(self, now: datetime) -> None:
        """Rebuild the workday events covering this year and the next."""
        start_date = date(now.year, 1, 1)
        span = date(now.year + 1, 12, 31) - start_date
        self.event_list = [
            CalendarEvent(summary=self._name, start=day, end=day)
            for day in (start_date + timedelta(days=i) for i in range(span.days + 1))
            if self.date_is_workday(day)
        ]

    @property
    def event(self) -> CalendarEvent | None:
        """Return the next upcoming workday event, if any.

        Returns None when no events exist or all are in the past; the
        previous implementation indexed [0] unconditionally and could raise
        IndexError, and re-evaluated utcnow() for every event.
        """
        if not self.event_list:
            return None
        today = dt_util.utcnow().date()  # hoisted: loop-invariant
        return next(
            (
                workday
                for workday in sorted(self.event_list, key=lambda e: e.start)
                if workday.start >= today
            ),
            None,
        )

    async def async_get_events(
        self, hass: HomeAssistant, start_date: datetime, end_date: datetime
    ) -> list[CalendarEvent]:
        """Get all events in a specific time frame."""
        return [
            workday
            for workday in self.event_list
            if start_date.date() <= workday.start <= end_date.date()
        ]
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/components/workday/calendar.py",
"license": "Apache License 2.0",
"lines": 92,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/helpers/service_info/esphome.py | """ESPHome discovery data."""
from dataclasses import dataclass
from yarl import URL
from homeassistant.data_entry_flow import BaseServiceInfo
@dataclass(slots=True)
class ESPHomeServiceInfo(BaseServiceInfo):
    """Prepared info from ESPHome entries."""

    name: str
    zwave_home_id: int | None
    ip_address: str
    port: int
    noise_psk: str | None = None

    @property
    def socket_path(self) -> str:
        """Return the esphome:// URL used to reach the device.

        The noise PSK, when set, is carried in the user part of the URL.
        """
        base = URL.build(scheme="esphome", host=self.ip_address, port=self.port)
        return str(base.with_user(self.noise_psk) if self.noise_psk else base)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/helpers/service_info/esphome.py",
"license": "Apache License 2.0",
"lines": 19,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/helpers/template/context.py | """Template context management for Home Assistant."""
from __future__ import annotations
from contextlib import AbstractContextManager
from contextvars import ContextVar
from types import TracebackType
from typing import Any
import jinja2
# Context variable for template string tracking.
# Holds (template_string, action) for the template currently being parsed or
# rendered in this context, or None when no template work is in flight.
template_cv: ContextVar[tuple[str, str] | None] = ContextVar(
    "template_cv", default=None
)
class TemplateContextManager(AbstractContextManager):
    """Context manager to store template being parsed or rendered in a ContextVar.

    __enter__ is inherited from AbstractContextManager and returns self;
    callers then record the active template via set_template().
    """

    def set_template(self, template_str: str, action: str) -> None:
        """Store template being parsed or rendered in a Contextvar to aid error handling."""
        template_cv.set((template_str, action))

    def __exit__(
        self,
        exc_type: type[BaseException] | None,
        exc_value: BaseException | None,
        traceback: TracebackType | None,
    ) -> None:
        """Clear the tracked template; returning None lets exceptions propagate."""
        template_cv.set(None)
# Global context manager instance; shared process-wide. The mutable state
# lives in the template_cv ContextVar, which isolates it per context.
template_context_manager = TemplateContextManager()
def render_with_context(
    template_str: str, template: jinja2.Template, **kwargs: Any
) -> str:
    """Render *template*, recording its source string to aid error handling."""
    manager = template_context_manager
    with manager:
        manager.set_template(template_str, "rendering")
        return template.render(**kwargs)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/helpers/template/context.py",
"license": "Apache License 2.0",
"lines": 33,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/helpers/template/extensions/base.py | """Base extension class for Home Assistant template extensions."""
from __future__ import annotations
from collections.abc import Callable
from dataclasses import dataclass
from functools import wraps
from typing import TYPE_CHECKING, Any, Concatenate, NoReturn
from jinja2 import pass_context
from jinja2.ext import Extension
from jinja2.nodes import Node
from jinja2.parser import Parser
from homeassistant.exceptions import TemplateError
if TYPE_CHECKING:
from homeassistant.core import HomeAssistant
from homeassistant.helpers.template import TemplateEnvironment
@dataclass
class TemplateFunction:
"""Definition for a template function, filter, or test."""
name: str
func: Callable[..., Any] | Any
as_global: bool = False
as_filter: bool = False
as_test: bool = False
limited_ok: bool = (
True # Whether this function is available in limited environments
)
requires_hass: bool = False # Whether this function requires hass to be available
def _pass_context[**_P, _R](
    func: Callable[Concatenate[Any, _P], _R],
    jinja_context: Callable[
        [Callable[Concatenate[Any, _P], _R]],
        Callable[Concatenate[Any, _P], _R],
    ] = pass_context,
) -> Callable[Concatenate[Any, _P], _R]:
    """Wrap function to pass context.

    We mark these as a context functions to ensure they get
    evaluated fresh with every execution, rather than executed
    at compile time and the value stored. The context itself
    can be discarded.

    jinja_context defaults to jinja2.pass_context; it is injectable,
    presumably to allow substitution in tests — TODO confirm.
    """

    # The Jinja render context arrives as the first positional argument;
    # the wrapper drops it before calling the wrapped function.
    @wraps(func)
    def wrapper(_: Any, *args: _P.args, **kwargs: _P.kwargs) -> _R:
        return func(*args, **kwargs)

    return jinja_context(wrapper)
class BaseTemplateExtension(Extension):
    """Base class for Home Assistant template extensions.

    Subclasses pass a list of TemplateFunction definitions; each is
    registered as a global, filter, and/or test on the environment,
    honoring hass availability and limited-environment restrictions.
    """

    environment: TemplateEnvironment

    def __init__(
        self,
        environment: TemplateEnvironment,
        *,
        functions: list[TemplateFunction] | None = None,
    ) -> None:
        """Initialize the extension with a list of template functions."""
        super().__init__(environment)
        for template_func in functions or ():
            # Skip functions that require hass when hass is not available.
            if template_func.requires_hass and self.environment.hass is None:
                continue
            # Register an error-raising stub for functions not allowed in
            # limited environments.
            if self.environment.limited and not template_func.limited_ok:
                self._register(
                    environment,
                    template_func,
                    self._create_unsupported_function(template_func.name),
                )
                continue
            func = template_func.func
            if template_func.requires_hass:
                # Wrap as a context function so it is evaluated fresh with
                # every execution, rather than executed at compile time and
                # the value stored.
                func = _pass_context(func)
            self._register(environment, template_func, func)

    @staticmethod
    def _register(
        environment: TemplateEnvironment,
        template_func: TemplateFunction,
        func: Callable[..., Any] | Any,
    ) -> None:
        """Expose func under every namespace the definition requests."""
        if template_func.as_global:
            environment.globals[template_func.name] = func
        if template_func.as_filter:
            environment.filters[template_func.name] = func
        if template_func.as_test:
            environment.tests[template_func.name] = func

    @staticmethod
    def _create_unsupported_function(name: str) -> Callable[[], NoReturn]:
        """Create a function that raises an error for unsupported functions in limited templates."""

        def unsupported(*args: Any, **kwargs: Any) -> NoReturn:
            raise TemplateError(
                f"Use of '{name}' is not supported in limited templates"
            )

        return unsupported

    @property
    def hass(self) -> HomeAssistant:
        """Return the Home Assistant instance.

        This property should only be used in extensions that have functions
        marked with requires_hass=True, as it assumes hass is not None.

        Raises:
            RuntimeError: If hass is not available in the environment.

        """
        if self.environment.hass is None:
            raise RuntimeError(
                "Home Assistant instance is not available. "
                "This property should only be used in extensions with "
                "functions marked requires_hass=True."
            )
        return self.environment.hass

    def parse(self, parser: Parser) -> Node | list[Node]:
        """Required by Jinja2 Extension base class."""
        return []
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/helpers/template/extensions/base.py",
"license": "Apache License 2.0",
"lines": 109,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/helpers/template/extensions/base64.py | """Base64 encoding and decoding functions for Home Assistant templates."""
from __future__ import annotations
import base64
from typing import TYPE_CHECKING
from .base import BaseTemplateExtension, TemplateFunction
if TYPE_CHECKING:
from homeassistant.helpers.template import TemplateEnvironment
class Base64Extension(BaseTemplateExtension):
    """Jinja2 extension for base64 encoding and decoding functions."""

    def __init__(self, environment: TemplateEnvironment) -> None:
        """Register the base64 filters with the template environment."""
        filters = [
            TemplateFunction(
                "base64_encode",
                self.base64_encode,
                as_filter=True,
                limited_ok=False,
            ),
            TemplateFunction(
                "base64_decode",
                self.base64_decode,
                as_filter=True,
                limited_ok=False,
            ),
        ]
        super().__init__(environment, functions=filters)

    @staticmethod
    def base64_encode(value: str | bytes) -> str:
        """Encode a string or bytes to base64."""
        raw = value.encode("utf-8") if isinstance(value, str) else value
        return base64.b64encode(raw).decode("utf-8")

    @staticmethod
    def base64_decode(value: str, encoding: str | None = "utf-8") -> str | bytes:
        """Decode a base64 string, returning raw bytes when encoding is None."""
        raw = base64.b64decode(value)
        return raw if encoding is None else raw.decode(encoding)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/helpers/template/extensions/base64.py",
"license": "Apache License 2.0",
"lines": 41,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/helpers/template/extensions/collection.py | """Collection and data structure functions for Home Assistant templates."""
from __future__ import annotations
from collections.abc import Iterable, MutableSequence
import random
from typing import TYPE_CHECKING, Any
from .base import BaseTemplateExtension, TemplateFunction
if TYPE_CHECKING:
from homeassistant.helpers.template import TemplateEnvironment
class CollectionExtension(BaseTemplateExtension):
    """Extension for collection and data structure operations."""

    def __init__(self, environment: TemplateEnvironment) -> None:
        """Initialize the collection extension."""
        super().__init__(
            environment,
            functions=[
                TemplateFunction(
                    "flatten",
                    self.flatten,
                    as_global=True,
                    as_filter=True,
                ),
                TemplateFunction(
                    "shuffle",
                    self.shuffle,
                    as_global=True,
                    as_filter=True,
                ),
                # Set operations
                TemplateFunction(
                    "intersect",
                    self.intersect,
                    as_global=True,
                    as_filter=True,
                ),
                TemplateFunction(
                    "difference",
                    self.difference,
                    as_global=True,
                    as_filter=True,
                ),
                TemplateFunction(
                    "union",
                    self.union,
                    as_global=True,
                    as_filter=True,
                ),
                TemplateFunction(
                    "symmetric_difference",
                    self.symmetric_difference,
                    as_global=True,
                    as_filter=True,
                ),
                # Type conversion functions
                TemplateFunction(
                    "set",
                    self.to_set,
                    as_global=True,
                ),
                TemplateFunction(
                    "tuple",
                    self.to_tuple,
                    as_global=True,
                ),
                # Type checking functions (tests)
                TemplateFunction(
                    "list",
                    self.is_list,
                    as_test=True,
                ),
                TemplateFunction(
                    "set",
                    self.is_set,
                    as_test=True,
                ),
                TemplateFunction(
                    "tuple",
                    self.is_tuple,
                    as_test=True,
                ),
            ],
        )

    @staticmethod
    def _check_iterable(value: Any, func_name: str) -> None:
        """Raise TypeError unless value is an iterable other than str.

        Centralizes the type check that was previously copy-pasted into
        every collection function; the message text is unchanged.
        """
        if not isinstance(value, Iterable) or isinstance(value, str):
            raise TypeError(
                f"{func_name} expected a list, got {type(value).__name__}"
            )

    def flatten(self, value: Iterable[Any], levels: int | None = None) -> list[Any]:
        """Flatten list of lists, recursing at most *levels* deep (unlimited if None)."""
        self._check_iterable(value, "flatten")
        flattened: list[Any] = []
        for item in value:
            if isinstance(item, Iterable) and not isinstance(item, str):
                if levels is None:
                    flattened.extend(self.flatten(item))
                elif levels >= 1:
                    flattened.extend(self.flatten(item, levels=(levels - 1)))
                else:
                    flattened.append(item)
            else:
                flattened.append(item)
        return flattened

    def shuffle(self, *args: Any, seed: Any = None) -> MutableSequence[Any]:
        """Shuffle a list, either with a seed or without."""
        if not args:
            raise TypeError("shuffle expected at least 1 argument, got 0")
        # If first argument is iterable and more than 1 argument provided
        # but not a named seed, then use 2nd argument as seed.
        if isinstance(args[0], Iterable) and not isinstance(args[0], str):
            items = list(args[0])
            if len(args) > 1 and seed is None:
                seed = args[1]
        elif len(args) == 1:
            raise TypeError(f"'{type(args[0]).__name__}' object is not iterable")
        else:
            items = list(args)
        if seed:
            r = random.Random(seed)
            r.shuffle(items)
        else:
            random.shuffle(items)
        return items

    def intersect(self, value: Iterable[Any], other: Iterable[Any]) -> list[Any]:
        """Return the common elements between two lists."""
        self._check_iterable(value, "intersect")
        self._check_iterable(other, "intersect")
        return list(set(value) & set(other))

    def difference(self, value: Iterable[Any], other: Iterable[Any]) -> list[Any]:
        """Return elements in first list that are not in second list."""
        self._check_iterable(value, "difference")
        self._check_iterable(other, "difference")
        return list(set(value) - set(other))

    def union(self, value: Iterable[Any], other: Iterable[Any]) -> list[Any]:
        """Return all unique elements from both lists combined."""
        self._check_iterable(value, "union")
        self._check_iterable(other, "union")
        return list(set(value) | set(other))

    def symmetric_difference(
        self, value: Iterable[Any], other: Iterable[Any]
    ) -> list[Any]:
        """Return elements that are in either list but not in both."""
        self._check_iterable(value, "symmetric_difference")
        self._check_iterable(other, "symmetric_difference")
        return list(set(value) ^ set(other))

    def to_set(self, value: Any) -> set[Any]:
        """Convert value to set."""
        return set(value)

    def to_tuple(self, value: Any) -> tuple[Any, ...]:
        """Convert value to tuple."""
        return tuple(value)

    def is_list(self, value: Any) -> bool:
        """Return whether a value is a list."""
        return isinstance(value, list)

    def is_set(self, value: Any) -> bool:
        """Return whether a value is a set."""
        return isinstance(value, set)

    def is_tuple(self, value: Any) -> bool:
        """Return whether a value is a tuple."""
        return isinstance(value, tuple)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/helpers/template/extensions/collection.py",
"license": "Apache License 2.0",
"lines": 166,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/helpers/template/extensions/crypto.py | """Cryptographic hash functions for Home Assistant templates."""
from __future__ import annotations
import hashlib
from typing import TYPE_CHECKING
from .base import BaseTemplateExtension, TemplateFunction
if TYPE_CHECKING:
from homeassistant.helpers.template import TemplateEnvironment
class CryptoExtension(BaseTemplateExtension):
    """Jinja2 extension for cryptographic hash functions."""

    def __init__(self, environment: TemplateEnvironment) -> None:
        """Register the hash helpers as globals and filters (not limited-safe)."""
        hash_functions = [
            TemplateFunction(
                name,
                impl,
                as_global=True,
                as_filter=True,
                limited_ok=False,
            )
            for name, impl in (
                ("md5", self.md5),
                ("sha1", self.sha1),
                ("sha256", self.sha256),
                ("sha512", self.sha512),
            )
        ]
        super().__init__(environment, functions=hash_functions)

    @staticmethod
    def md5(value: str) -> str:
        """Return the hex md5 digest of a string."""
        return hashlib.md5(value.encode()).hexdigest()

    @staticmethod
    def sha1(value: str) -> str:
        """Return the hex sha1 digest of a string."""
        return hashlib.sha1(value.encode()).hexdigest()

    @staticmethod
    def sha256(value: str) -> str:
        """Return the hex sha256 digest of a string."""
        return hashlib.sha256(value.encode()).hexdigest()

    @staticmethod
    def sha512(value: str) -> str:
        """Return the hex sha512 digest of a string."""
        return hashlib.sha512(value.encode()).hexdigest()
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/helpers/template/extensions/crypto.py",
"license": "Apache License 2.0",
"lines": 53,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/helpers/template/extensions/math.py | """Mathematical and statistical functions for Home Assistant templates."""
from __future__ import annotations
from collections.abc import Iterable
from functools import wraps
import math
import statistics
from typing import TYPE_CHECKING, Any, Literal
import jinja2
from jinja2 import pass_environment
from homeassistant.helpers.template.helpers import raise_no_default
from .base import BaseTemplateExtension, TemplateFunction
if TYPE_CHECKING:
from homeassistant.helpers.template import TemplateEnvironment
# Sentinel object for default parameter.
# Distinguishes "argument omitted" from an explicit None passed by the caller.
_SENTINEL = object()
class MathExtension(BaseTemplateExtension):
    """Jinja2 extension for mathematical and statistical functions.

    Most helpers accept an optional ``default`` which is returned instead
    of raising when the input cannot be converted to a number.
    """

    def __init__(self, environment: TemplateEnvironment) -> None:
        """Initialize the math extension and register all helpers."""
        super().__init__(
            environment,
            functions=[
                # Math constants (as globals only) - these are values, not functions
                TemplateFunction("e", math.e, as_global=True),
                TemplateFunction("pi", math.pi, as_global=True),
                TemplateFunction("tau", math.pi * 2, as_global=True),
                # Trigonometric functions (as globals and filters)
                TemplateFunction("sin", self.sine, as_global=True, as_filter=True),
                TemplateFunction("cos", self.cosine, as_global=True, as_filter=True),
                TemplateFunction("tan", self.tangent, as_global=True, as_filter=True),
                TemplateFunction("asin", self.arc_sine, as_global=True, as_filter=True),
                TemplateFunction(
                    "acos", self.arc_cosine, as_global=True, as_filter=True
                ),
                TemplateFunction(
                    "atan", self.arc_tangent, as_global=True, as_filter=True
                ),
                TemplateFunction(
                    "atan2", self.arc_tangent2, as_global=True, as_filter=True
                ),
                # Advanced math functions (as globals and filters)
                TemplateFunction("log", self.logarithm, as_global=True, as_filter=True),
                TemplateFunction(
                    "sqrt", self.square_root, as_global=True, as_filter=True
                ),
                # Statistical functions (as globals and filters)
                TemplateFunction(
                    "average", self.average, as_global=True, as_filter=True
                ),
                TemplateFunction("median", self.median, as_global=True, as_filter=True),
                TemplateFunction(
                    "statistical_mode",
                    self.statistical_mode,
                    as_global=True,
                    as_filter=True,
                ),
                # Min/Max functions (as globals only)
                TemplateFunction("min", self.min_max_min, as_global=True),
                TemplateFunction("max", self.min_max_max, as_global=True),
                # Bitwise operations (as globals and filters)
                TemplateFunction(
                    "bitwise_and", self.bitwise_and, as_global=True, as_filter=True
                ),
                TemplateFunction(
                    "bitwise_or", self.bitwise_or, as_global=True, as_filter=True
                ),
                TemplateFunction(
                    "bitwise_xor", self.bitwise_xor, as_global=True, as_filter=True
                ),
                # Value constraint functions (as globals and filters)
                TemplateFunction("clamp", self.clamp, as_global=True, as_filter=True),
                TemplateFunction("wrap", self.wrap, as_global=True, as_filter=True),
                TemplateFunction("remap", self.remap, as_global=True, as_filter=True),
            ],
        )

    @staticmethod
    def logarithm(value: Any, base: Any = math.e, default: Any = _SENTINEL) -> Any:
        """Filter and function to get logarithm of the value with a specific base."""
        # NOTE: the original used Python 2 `except ValueError, TypeError:`
        # syntax, which is a SyntaxError on Python 3; fixed to a tuple here
        # (and in every other handler in this class).
        try:
            base_float = float(base)
        except (ValueError, TypeError):
            if default is _SENTINEL:
                raise_no_default("log", base)
            return default
        try:
            value_float = float(value)
            return math.log(value_float, base_float)
        except (ValueError, TypeError):
            if default is _SENTINEL:
                raise_no_default("log", value)
            return default

    @staticmethod
    def sine(value: Any, default: Any = _SENTINEL) -> Any:
        """Filter and function to get sine of the value."""
        try:
            return math.sin(float(value))
        except (ValueError, TypeError):
            if default is _SENTINEL:
                raise_no_default("sin", value)
            return default

    @staticmethod
    def cosine(value: Any, default: Any = _SENTINEL) -> Any:
        """Filter and function to get cosine of the value."""
        try:
            return math.cos(float(value))
        except (ValueError, TypeError):
            if default is _SENTINEL:
                raise_no_default("cos", value)
            return default

    @staticmethod
    def tangent(value: Any, default: Any = _SENTINEL) -> Any:
        """Filter and function to get tangent of the value."""
        try:
            return math.tan(float(value))
        except (ValueError, TypeError):
            if default is _SENTINEL:
                raise_no_default("tan", value)
            return default

    @staticmethod
    def arc_sine(value: Any, default: Any = _SENTINEL) -> Any:
        """Filter and function to get arc sine of the value."""
        try:
            return math.asin(float(value))
        except (ValueError, TypeError):
            if default is _SENTINEL:
                raise_no_default("asin", value)
            return default

    @staticmethod
    def arc_cosine(value: Any, default: Any = _SENTINEL) -> Any:
        """Filter and function to get arc cosine of the value."""
        try:
            return math.acos(float(value))
        except (ValueError, TypeError):
            if default is _SENTINEL:
                raise_no_default("acos", value)
            return default

    @staticmethod
    def arc_tangent(value: Any, default: Any = _SENTINEL) -> Any:
        """Filter and function to get arc tangent of the value."""
        try:
            return math.atan(float(value))
        except (ValueError, TypeError):
            if default is _SENTINEL:
                raise_no_default("atan", value)
            return default

    @staticmethod
    def arc_tangent2(*args: Any, default: Any = _SENTINEL) -> Any:
        """Filter and function to calculate four quadrant arc tangent of y / x.

        The parameters to atan2 may be passed either in an iterable or as separate arguments
        The default value may be passed either as a positional or in a keyword argument
        """
        try:
            if 1 <= len(args) <= 2 and isinstance(args[0], (list, tuple)):
                if len(args) == 2 and default is _SENTINEL:
                    # Default value passed as a positional argument
                    default = args[1]
                args = tuple(args[0])
            elif len(args) == 3 and default is _SENTINEL:
                # Default value passed as a positional argument
                default = args[2]
            return math.atan2(float(args[0]), float(args[1]))
        except (ValueError, TypeError):
            if default is _SENTINEL:
                raise_no_default("atan2", args)
            return default

    @staticmethod
    def square_root(value: Any, default: Any = _SENTINEL) -> Any:
        """Filter and function to get square root of the value."""
        try:
            return math.sqrt(float(value))
        except (ValueError, TypeError):
            if default is _SENTINEL:
                raise_no_default("sqrt", value)
            return default

    @staticmethod
    def average(*args: Any, default: Any = _SENTINEL) -> Any:
        """Filter and function to calculate the arithmetic mean.

        Calculates of an iterable or of two or more arguments.
        The parameters may be passed as an iterable or as separate arguments.
        """
        if len(args) == 0:
            raise TypeError("average expected at least 1 argument, got 0")
        # If first argument is iterable and more than 1 argument provided but not a named
        # default, then use 2nd argument as default.
        if isinstance(args[0], Iterable):
            average_list = args[0]
            if len(args) > 1 and default is _SENTINEL:
                default = args[1]
        elif len(args) == 1:
            raise TypeError(f"'{type(args[0]).__name__}' object is not iterable")
        else:
            average_list = args
        try:
            return statistics.fmean(average_list)
        except (TypeError, statistics.StatisticsError):
            if default is _SENTINEL:
                raise_no_default("average", args)
            return default

    @staticmethod
    def median(*args: Any, default: Any = _SENTINEL) -> Any:
        """Filter and function to calculate the median.

        Calculates median of an iterable of two or more arguments.
        The parameters may be passed as an iterable or as separate arguments.
        """
        if len(args) == 0:
            raise TypeError("median expected at least 1 argument, got 0")
        # If first argument is a list or tuple and more than 1 argument provided but not a named
        # default, then use 2nd argument as default.
        if isinstance(args[0], Iterable):
            median_list = args[0]
            if len(args) > 1 and default is _SENTINEL:
                default = args[1]
        elif len(args) == 1:
            raise TypeError(f"'{type(args[0]).__name__}' object is not iterable")
        else:
            median_list = args
        try:
            return statistics.median(median_list)
        except (TypeError, statistics.StatisticsError):
            if default is _SENTINEL:
                raise_no_default("median", args)
            return default

    @staticmethod
    def statistical_mode(*args: Any, default: Any = _SENTINEL) -> Any:
        """Filter and function to calculate the statistical mode.

        Calculates mode of an iterable of two or more arguments.
        The parameters may be passed as an iterable or as separate arguments.
        """
        if not args:
            raise TypeError("statistical_mode expected at least 1 argument, got 0")
        # If first argument is a list or tuple and more than 1 argument provided but not a named
        # default, then use 2nd argument as default.
        if len(args) == 1 and isinstance(args[0], Iterable):
            mode_list = args[0]
        elif isinstance(args[0], list | tuple):
            mode_list = args[0]
            if len(args) > 1 and default is _SENTINEL:
                default = args[1]
        elif len(args) == 1:
            raise TypeError(f"'{type(args[0]).__name__}' object is not iterable")
        else:
            mode_list = args
        try:
            return statistics.mode(mode_list)
        except (TypeError, statistics.StatisticsError):
            if default is _SENTINEL:
                raise_no_default("statistical_mode", args)
            return default

    def min_max_from_filter(self, builtin_filter: Any, name: str) -> Any:
        """Convert a built-in min/max Jinja filter to a global function.

        The parameters may be passed as an iterable or as separate arguments.
        """

        @pass_environment
        @wraps(builtin_filter)
        def wrapper(environment: jinja2.Environment, *args: Any, **kwargs: Any) -> Any:
            if len(args) == 0:
                raise TypeError(f"{name} expected at least 1 argument, got 0")
            if len(args) == 1:
                if isinstance(args[0], Iterable):
                    return builtin_filter(environment, args[0], **kwargs)
                raise TypeError(f"'{type(args[0]).__name__}' object is not iterable")
            return builtin_filter(environment, args, **kwargs)

        # The decorator above already marks the wrapper with pass_environment;
        # wrapping a second time on return was redundant.
        return wrapper

    def min_max_min(self, *args: Any, **kwargs: Any) -> Any:
        """Min function using built-in filter."""
        return self.min_max_from_filter(self.environment.filters["min"], "min")(
            self.environment, *args, **kwargs
        )

    def min_max_max(self, *args: Any, **kwargs: Any) -> Any:
        """Max function using built-in filter."""
        return self.min_max_from_filter(self.environment.filters["max"], "max")(
            self.environment, *args, **kwargs
        )

    @staticmethod
    def bitwise_and(first_value: Any, second_value: Any) -> Any:
        """Perform a bitwise and operation."""
        return first_value & second_value

    @staticmethod
    def bitwise_or(first_value: Any, second_value: Any) -> Any:
        """Perform a bitwise or operation."""
        return first_value | second_value

    @staticmethod
    def bitwise_xor(first_value: Any, second_value: Any) -> Any:
        """Perform a bitwise xor operation."""
        return first_value ^ second_value

    @staticmethod
    def clamp(value: Any, min_value: Any, max_value: Any) -> Any:
        """Filter and function to clamp a value between min and max bounds.

        Constrains value to the range [min_value, max_value] (inclusive).
        """
        try:
            value_num = float(value)
            min_value_num = float(min_value)
            max_value_num = float(max_value)
        except (ValueError, TypeError) as err:
            raise ValueError(
                f"function requires numeric arguments, "
                f"got {value=}, {min_value=}, {max_value=}"
            ) from err
        return max(min_value_num, min(max_value_num, value_num))

    @staticmethod
    def wrap(value: Any, min_value: Any, max_value: Any) -> Any:
        """Filter and function to wrap a value within a range.

        Wraps value cyclically within [min_value, max_value) (inclusive min, exclusive max).
        """
        try:
            value_num = float(value)
            min_value_num = float(min_value)
            max_value_num = float(max_value)
        except (ValueError, TypeError) as err:
            raise ValueError(
                f"function requires numeric arguments, "
                f"got {value=}, {min_value=}, {max_value=}"
            ) from err
        try:
            range_size = max_value_num - min_value_num
            return ((value_num - min_value_num) % range_size) + min_value_num
        except ZeroDivisionError:  # be lenient: if the range is empty, just clamp
            return min_value_num

    @staticmethod
    def remap(
        value: Any,
        in_min: Any,
        in_max: Any,
        out_min: Any,
        out_max: Any,
        *,
        steps: int = 0,
        edges: Literal["none", "clamp", "wrap", "mirror"] = "none",
    ) -> Any:
        """Filter and function to remap a value from one range to another.

        Maps value from input range [in_min, in_max] to output range [out_min, out_max].

        The steps parameter, if greater than 0, quantizes the output into
        the specified number of discrete steps.

        The edges parameter controls how out-of-bounds input values are handled:
        - "none": No special handling; values outside the input range are extrapolated into the output range.
        - "clamp": Values outside the input range are clamped to the nearest boundary.
        - "wrap": Values outside the input range are wrapped around cyclically.
        - "mirror": Values outside the input range are mirrored back into the range.
        """
        try:
            value_num = float(value)
            in_min_num = float(in_min)
            in_max_num = float(in_max)
            out_min_num = float(out_min)
            out_max_num = float(out_max)
        except (ValueError, TypeError) as err:
            raise ValueError(
                f"function requires numeric arguments, "
                f"got {value=}, {in_min=}, {in_max=}, {out_min=}, {out_max=}"
            ) from err
        # Apply edge behavior in original space for accuracy.
        if edges == "clamp":
            value_num = max(in_min_num, min(in_max_num, value_num))
        elif edges == "wrap":
            if in_min_num == in_max_num:
                raise ValueError(f"{in_min=} must not equal {in_max=}")
            range_size = in_max_num - in_min_num  # Validated against div0 above.
            value_num = ((value_num - in_min_num) % range_size) + in_min_num
        elif edges == "mirror":
            if in_min_num == in_max_num:
                raise ValueError(f"{in_min=} must not equal {in_max=}")
            range_size = in_max_num - in_min_num  # Validated against div0 above.
            # Determine which period we're in and whether it should be mirrored
            offset = value_num - in_min_num
            period = math.floor(offset / range_size)
            position_in_period = offset - (period * range_size)
            if (period < 0) or (period % 2 != 0):
                position_in_period = range_size - position_in_period
            value_num = in_min_num + position_in_period
        # Unknown "edges" values are left as-is; no use throwing an error.
        steps = max(steps, 0)
        if not steps and (in_min_num == out_min_num and in_max_num == out_max_num):
            return value_num  # No remapping needed. Save some cycles and floating-point precision.
        normalized = (value_num - in_min_num) / (in_max_num - in_min_num)
        if steps:
            normalized = round(normalized * steps) / steps
        return out_min_num + (normalized * (out_max_num - out_min_num))
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/helpers/template/extensions/math.py",
"license": "Apache License 2.0",
"lines": 378,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/helpers/template/extensions/regex.py | """Jinja2 extension for regular expression functions."""
from __future__ import annotations
from functools import lru_cache
import re
from typing import TYPE_CHECKING, Any
from .base import BaseTemplateExtension, TemplateFunction
if TYPE_CHECKING:
from homeassistant.helpers.template import TemplateEnvironment
# Module-level regex cache shared across all instances
_regex_cache = lru_cache(maxsize=128)(re.compile)
class RegexExtension(BaseTemplateExtension):
    """Jinja2 extension exposing regular-expression helpers to templates."""

    def __init__(self, environment: TemplateEnvironment) -> None:
        """Register the regex filters and tests."""
        super().__init__(
            environment,
            functions=[
                TemplateFunction("regex_match", self.regex_match, as_filter=True),
                TemplateFunction("regex_search", self.regex_search, as_filter=True),
                # The same callables double as Jinja tests under shorter names.
                TemplateFunction("match", self.regex_match, as_test=True),
                TemplateFunction("search", self.regex_search, as_test=True),
                TemplateFunction("regex_replace", self.regex_replace, as_filter=True),
                TemplateFunction("regex_findall", self.regex_findall, as_filter=True),
                TemplateFunction(
                    "regex_findall_index", self.regex_findall_index, as_filter=True
                ),
            ],
        )

    def regex_match(self, value: Any, find: str = "", ignorecase: bool = False) -> bool:
        """Return True if the pattern matches at the start of the value."""
        text = value if isinstance(value, str) else str(value)
        pattern = _regex_cache(find, re.IGNORECASE if ignorecase else 0)
        return pattern.match(text) is not None

    def regex_replace(
        self,
        value: Any = "",
        find: str = "",
        replace: str = "",
        ignorecase: bool = False,
    ) -> str:
        """Substitute every occurrence of the pattern in the value."""
        text = value if isinstance(value, str) else str(value)
        pattern = _regex_cache(find, re.IGNORECASE if ignorecase else 0)
        return str(pattern.sub(replace, text))

    def regex_search(
        self, value: Any, find: str = "", ignorecase: bool = False
    ) -> bool:
        """Return True if the pattern matches anywhere in the value."""
        text = value if isinstance(value, str) else str(value)
        pattern = _regex_cache(find, re.IGNORECASE if ignorecase else 0)
        return pattern.search(text) is not None

    def regex_findall_index(
        self, value: Any, find: str = "", index: int = 0, ignorecase: bool = False
    ) -> str:
        """Return match number *index* from the list of all matches."""
        return self.regex_findall(value, find, ignorecase)[index]

    def regex_findall(
        self, value: Any, find: str = "", ignorecase: bool = False
    ) -> list[str]:
        """Return all non-overlapping matches of the pattern in the value."""
        text = value if isinstance(value, str) else str(value)
        return _regex_cache(find, re.IGNORECASE if ignorecase else 0).findall(text)
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/helpers/template/extensions/regex.py",
"license": "Apache License 2.0",
"lines": 95,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/helpers/template/extensions/string.py | """Jinja2 extension for string processing functions."""
from __future__ import annotations
from typing import TYPE_CHECKING, Any
from urllib.parse import urlencode as urllib_urlencode
from homeassistant.util import slugify as slugify_util
from .base import BaseTemplateExtension, TemplateFunction
if TYPE_CHECKING:
from homeassistant.helpers.template import TemplateEnvironment
class StringExtension(BaseTemplateExtension):
    """Jinja2 extension for string processing functions."""

    def __init__(self, environment: TemplateEnvironment) -> None:
        """Register the string helpers."""
        super().__init__(
            environment,
            functions=[
                TemplateFunction("ordinal", self.ordinal, as_filter=True),
                TemplateFunction(
                    "slugify", self.slugify, as_global=True, as_filter=True
                ),
                TemplateFunction("urlencode", self.urlencode, as_global=True),
            ],
        )

    def ordinal(self, value: Any) -> str:
        """Append the English ordinal suffix (1st, 2nd, 3rd, 4th, ...) to a value."""
        text = str(value)
        if int(text[-2:]) % 100 in (11, 12, 13):
            # 11th, 12th and 13th are irregular.
            suffix = "th"
        else:
            # codespell:ignore nd
            suffix = {1: "st", 2: "nd", 3: "rd"}.get(int(text[-1]) % 10, "th")
        return text + suffix

    def slugify(self, value: Any, separator: str = "_") -> str:
        """Convert a string into a slug, such as what is used for entity ids."""
        return slugify_util(str(value), separator=separator)

    def urlencode(self, value: Any) -> bytes:
        """Urlencode a dictionary and return the UTF-8 encoded bytes."""
        return urllib_urlencode(value).encode("utf-8")
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/helpers/template/extensions/string.py",
"license": "Apache License 2.0",
"lines": 47,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:homeassistant/helpers/template/helpers.py | """Template helper functions for Home Assistant."""
from __future__ import annotations
from typing import TYPE_CHECKING, Any, NoReturn
import voluptuous as vol
from homeassistant.helpers import (
area_registry as ar,
device_registry as dr,
entity_registry as er,
)
from .context import template_cv
if TYPE_CHECKING:
from homeassistant.core import HomeAssistant
def raise_no_default(function: str, value: Any) -> NoReturn:
    """Raise ValueError when no default is specified for template functions."""
    # Fall back to a generic action description when no template context is set.
    template, action = template_cv.get() or ("", "rendering or compiling")
    message = (
        f"Template error: {function} got invalid input '{value}'"
        f" when {action} template '{template}' but no default was specified"
    )
    raise ValueError(message)
def resolve_area_id(hass: HomeAssistant, lookup_value: Any) -> str | None:
    """Resolve lookup value to an area ID.

    Accepts area name, area alias, device ID, or entity ID.
    Returns the area ID or None if not found.
    """
    area_registry = ar.async_get(hass)
    device_registry = dr.async_get(hass)
    entity_registry = er.async_get(hass)

    name = str(lookup_value)

    # A direct area-name match takes precedence.
    if (named_area := area_registry.async_get_area_by_name(name)) is not None:
        return named_area.id

    # Next, any area registered under this alias.
    if aliased_areas := area_registry.async_get_areas_by_alias(name):
        return aliased_areas[0].id

    # Import here, not at top-level to avoid circular import
    from homeassistant.helpers import config_validation as cv  # noqa: PLC0415

    # Then, treat the value as an entity ID and use its own (or its
    # device's) area.
    try:
        cv.entity_id(lookup_value)
    except vol.Invalid:
        pass
    else:
        if (entity := entity_registry.async_get(lookup_value)) is not None:
            if entity.area_id:
                return entity.area_id
            if entity.device_id and (
                device := device_registry.async_get(entity.device_id)
            ):
                return device.area_id

    # Finally, treat the value as a device ID.
    if device := device_registry.async_get(lookup_value):
        return device.area_id

    return None
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/helpers/template/helpers.py",
"license": "Apache License 2.0",
"lines": 54,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:homeassistant/helpers/template/render_info.py | """Template render information tracking for Home Assistant."""
from __future__ import annotations
import collections.abc
from collections.abc import Callable
from contextvars import ContextVar
from typing import TYPE_CHECKING, cast
from homeassistant.core import split_entity_id
if TYPE_CHECKING:
from homeassistant.exceptions import TemplateError
from . import Template
# Rate limiting constants
ALL_STATES_RATE_LIMIT = 60 # seconds
DOMAIN_STATES_RATE_LIMIT = 1 # seconds
# Context variable for render information tracking
render_info_cv: ContextVar[RenderInfo | None] = ContextVar(
"render_info_cv", default=None
)
# Filter functions for efficiency
def _true(entity_id: str) -> bool:
"""Return True for all entity IDs."""
return True
def _false(entity_id: str) -> bool:
"""Return False for all entity IDs."""
return False
class RenderInfo:
    """Holds information about a template render.

    Collects, while a template renders, which entities/domains it read and
    whether it touched all states or the clock. After rendering, ``_freeze``
    (or ``_freeze_static``) turns the collected sets into frozensets and
    derives the change filters and rate limit used by template trackers.
    """

    __slots__ = (
        "_result",
        "all_states",
        "all_states_lifecycle",
        "domains",
        "domains_lifecycle",
        "entities",
        "exception",
        "filter",
        "filter_lifecycle",
        "has_time",
        "is_static",
        "rate_limit",
        "template",
    )

    def __init__(self, template: Template) -> None:
        """Initialise."""
        self.template = template
        # Will be set sensibly once frozen.
        self.filter_lifecycle: Callable[[str], bool] = _true
        self.filter: Callable[[str], bool] = _true
        # Rendered output text; read back via result().
        self._result: str | None = None
        # True when the template needs no state tracking at all.
        self.is_static = False
        # Error raised during render; re-raised by result().
        self.exception: TemplateError | None = None
        # Whether the render iterated all states / observed all lifecycles.
        self.all_states = False
        self.all_states_lifecycle = False
        # Dependencies collected while rendering; mutable sets here,
        # converted to frozensets by _freeze_sets().
        self.domains: collections.abc.Set[str] = set()
        self.domains_lifecycle: collections.abc.Set[str] = set()
        self.entities: collections.abc.Set[str] = set()
        # Minimum seconds between re-renders; derived in _freeze() when unset.
        self.rate_limit: float | None = None
        # True when the template accessed the current time.
        self.has_time = False

    def __repr__(self) -> str:
        """Representation of RenderInfo."""
        return (
            f"<RenderInfo {self.template}"
            f" all_states={self.all_states}"
            f" all_states_lifecycle={self.all_states_lifecycle}"
            f" domains={self.domains}"
            f" domains_lifecycle={self.domains_lifecycle}"
            f" entities={self.entities}"
            f" rate_limit={self.rate_limit}"
            f" has_time={self.has_time}"
            f" exception={self.exception}"
            f" is_static={self.is_static}"
            ">"
        )

    def _filter_domains_and_entities(self, entity_id: str) -> bool:
        """Template should re-render if the entity state changes.

        Only when we match specific domains or entities.
        """
        return (
            split_entity_id(entity_id)[0] in self.domains or entity_id in self.entities
        )

    def _filter_entities(self, entity_id: str) -> bool:
        """Template should re-render if the entity state changes.

        Only when we match specific entities.
        """
        return entity_id in self.entities

    def _filter_lifecycle_domains(self, entity_id: str) -> bool:
        """Template should re-render if the entity is added or removed.

        Only with domains watched.
        """
        return split_entity_id(entity_id)[0] in self.domains_lifecycle

    def result(self) -> str:
        """Results of the template computation."""
        # A render that failed re-raises its captured exception here.
        if self.exception is not None:
            raise self.exception
        return cast(str, self._result)

    def _freeze_static(self) -> None:
        # Static templates never need re-rendering on state changes.
        self.is_static = True
        self._freeze_sets()
        self.all_states = False

    def _freeze_sets(self) -> None:
        # Make the collected dependency sets immutable (and hashable).
        self.entities = frozenset(self.entities)
        self.domains = frozenset(self.domains)
        self.domains_lifecycle = frozenset(self.domains_lifecycle)

    def _freeze(self) -> None:
        self._freeze_sets()

        # Pick a rate limit before the exception early-return so failed
        # renders are also throttled.
        if self.rate_limit is None:
            if self.all_states or self.exception:
                self.rate_limit = ALL_STATES_RATE_LIMIT
            elif self.domains or self.domains_lifecycle:
                self.rate_limit = DOMAIN_STATES_RATE_LIMIT

        if self.exception:
            # Keep the permissive _true filters: we don't know what the
            # template would have depended on.
            return

        # Narrow the lifecycle filter unless every lifecycle event matters.
        if not self.all_states_lifecycle:
            if self.domains_lifecycle:
                self.filter_lifecycle = self._filter_lifecycle_domains
            else:
                self.filter_lifecycle = _false

        if self.all_states:
            # Any state change can affect the result; keep the _true filter.
            return

        # Narrow the state-change filter to the tracked domains/entities.
        if self.domains:
            self.filter = self._filter_domains_and_entities
        elif self.entities:
            self.filter = self._filter_entities
        else:
            self.filter = _false
| {
"repo_id": "home-assistant/core",
"file_path": "homeassistant/helpers/template/render_info.py",
"license": "Apache License 2.0",
"lines": 124,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_complex |
home-assistant/core:script/hassfest/integration_info.py | """Write integration constants."""
from __future__ import annotations
from .model import Config, Integration
from .serializer import format_python
def validate(integrations: dict[str, Integration], config: Config) -> None:
    """Validate integrations file."""
    if config.specific_integrations:
        return

    int_type = "entity"
    domains = sorted(
        integration.domain
        for integration in integrations.values()
        if integration.manifest.get("integration_type") == int_type
        # Tag is type "entity" but has no entity platform
        and integration.domain != "tag"
    )

    lines = [
        "from enum import StrEnum",
        "class EntityPlatforms(StrEnum):",
        f' """Available {int_type} platforms."""',
    ]
    lines.extend(f' {domain.upper()} = "{domain}"' for domain in domains)

    config.cache[f"integrations_{int_type}"] = format_python(
        "\n".join(lines), generator="script.hassfest"
    )
def generate(integrations: dict[str, Integration], config: Config) -> None:
    """Generate integration file.

    Writes the module rendered by validate() (cached under
    ``integrations_entity``) to ``homeassistant/generated/entity_platforms.py``.
    The ``integrations`` argument is unused here but kept so the signature
    matches the other hassfest generate() hooks — TODO confirm against the
    hassfest runner.
    """
    int_type = "entity"
    filename = "entity_platforms"
    # Bug fix: the path previously used a literal "(unknown)" instead of
    # interpolating ``filename``, leaving the variable unused and writing
    # to the wrong file.
    platform_path = config.root / f"homeassistant/generated/{filename}.py"
    platform_path.write_text(config.cache[f"integrations_{int_type}"])
| {
"repo_id": "home-assistant/core",
"file_path": "script/hassfest/integration_info.py",
"license": "Apache License 2.0",
"lines": 31,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | function_simple |
home-assistant/core:tests/components/airos/test_init.py | """Test for airOS integration setup."""
from __future__ import annotations
from unittest.mock import ANY, AsyncMock, MagicMock
from airos.exceptions import (
AirOSConnectionAuthenticationError,
AirOSConnectionSetupError,
AirOSDeviceConnectionError,
AirOSKeyDataMissingError,
)
import pytest
from homeassistant.components.airos.const import (
DEFAULT_SSL,
DEFAULT_VERIFY_SSL,
DOMAIN,
SECTION_ADVANCED_SETTINGS,
)
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
from homeassistant.components.sensor import DOMAIN as SENSOR_DOMAIN
from homeassistant.config_entries import SOURCE_USER, ConfigEntryState
from homeassistant.const import (
CONF_HOST,
CONF_PASSWORD,
CONF_SSL,
CONF_USERNAME,
CONF_VERIFY_SSL,
)
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr, entity_registry as er
from tests.common import MockConfigEntry
# Entry data as created by config-flow version 1 (no advanced-settings section).
MOCK_CONFIG_V1 = {
    CONF_HOST: "1.1.1.1",
    CONF_USERNAME: "ubnt",
    CONF_PASSWORD: "test-password",
}

# Entry data with SSL explicitly disabled (plain-HTTP device).
MOCK_CONFIG_PLAIN = {
    CONF_HOST: "1.1.1.1",
    CONF_USERNAME: "ubnt",
    CONF_PASSWORD: "test-password",
    SECTION_ADVANCED_SETTINGS: {
        CONF_SSL: False,
        CONF_VERIFY_SSL: False,
    },
}

# Entry data after the 1.2 migration: advanced-settings section filled
# with the integration defaults.
MOCK_CONFIG_V1_2 = {
    CONF_HOST: "1.1.1.1",
    CONF_USERNAME: "ubnt",
    CONF_PASSWORD: "test-password",
    SECTION_ADVANCED_SETTINGS: {
        CONF_SSL: DEFAULT_SSL,
        CONF_VERIFY_SSL: DEFAULT_VERIFY_SSL,
    },
}
async def test_setup_entry_with_default_ssl(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_airos_class: MagicMock,
    mock_airos_client: MagicMock,
    mock_async_get_firmware_data: AsyncMock,
) -> None:
    """Test setting up a config entry with default SSL options."""
    mock_config_entry.add_to_hass(hass)
    await hass.config_entries.async_setup(mock_config_entry.entry_id)
    await hass.async_block_till_done()

    assert mock_config_entry.state is ConfigEntryState.LOADED

    # The client must be built from the stored credentials with SSL enabled.
    data = mock_config_entry.data
    mock_airos_class.assert_called_once_with(
        host=data[CONF_HOST],
        username=data[CONF_USERNAME],
        password=data[CONF_PASSWORD],
        session=ANY,
        use_ssl=DEFAULT_SSL,
    )

    advanced = data[SECTION_ADVANCED_SETTINGS]
    assert advanced[CONF_SSL] is True
    assert advanced[CONF_VERIFY_SSL] is False
async def test_setup_entry_without_ssl(
    hass: HomeAssistant,
    mock_airos_class: MagicMock,
    mock_airos_client: MagicMock,
    mock_async_get_firmware_data: AsyncMock,
) -> None:
    """Test setting up a config entry adjusted to plain HTTP."""
    config_entry = MockConfigEntry(
        domain=DOMAIN,
        data=MOCK_CONFIG_PLAIN,
        entry_id="1",
        unique_id="airos_device",
        version=1,
        minor_version=2,
    )
    config_entry.add_to_hass(hass)

    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

    assert config_entry.state is ConfigEntryState.LOADED

    # SSL must be switched off when the stored options request plain HTTP.
    mock_airos_class.assert_called_once_with(
        host=config_entry.data[CONF_HOST],
        username=config_entry.data[CONF_USERNAME],
        password=config_entry.data[CONF_PASSWORD],
        session=ANY,
        use_ssl=False,
    )

    advanced = config_entry.data[SECTION_ADVANCED_SETTINGS]
    assert advanced[CONF_SSL] is False
    assert advanced[CONF_VERIFY_SSL] is False
async def test_ssl_migrate_entry(
    hass: HomeAssistant,
    mock_airos_client: MagicMock,
    mock_async_get_firmware_data: AsyncMock,
) -> None:
    """Test migrate entry SSL options."""
    # Start from a version 1 entry that predates the advanced-settings section.
    config_entry = MockConfigEntry(
        domain=DOMAIN,
        source=SOURCE_USER,
        data=MOCK_CONFIG_V1,
        entry_id="1",
        unique_id="airos_device",
        version=1,
    )
    config_entry.add_to_hass(hass)

    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()

    # Setup must migrate the entry to 2.1 and fill in the SSL defaults.
    assert config_entry.state is ConfigEntryState.LOADED
    assert config_entry.version == 2
    assert config_entry.minor_version == 1
    assert config_entry.data == MOCK_CONFIG_V1_2
@pytest.mark.parametrize(
    ("sensor_domain", "sensor_name", "mock_id"),
    [
        (BINARY_SENSOR_DOMAIN, "port_forwarding", "device_id_12345"),
        (SENSOR_DOMAIN, "antenna_gain", "01:23:45:67:89:ab"),
    ],
)
async def test_uid_migrate_entry(
    hass: HomeAssistant,
    device_registry: dr.DeviceRegistry,
    sensor_domain: str,
    sensor_name: str,
    mock_id: str,
    mock_airos_client: MagicMock,
    mock_async_get_firmware_data: AsyncMock,
) -> None:
    """Test migrate entry unique id.

    Entities registered under a legacy unique id (device id or raw MAC)
    must be re-keyed to the formatted-MAC-based unique id during setup.
    """
    entity_registry = er.async_get(hass)
    MOCK_MAC = dr.format_mac("01:23:45:67:89:AB")
    MOCK_ID = "device_id_12345"
    old_unique_id = f"{mock_id}_{sensor_name}"
    new_unique_id = f"{MOCK_MAC}_{sensor_name}"
    # Version 1.2 entry still keyed on the legacy identifier.
    entry = MockConfigEntry(
        domain=DOMAIN,
        source=SOURCE_USER,
        data=MOCK_CONFIG_V1_2,
        entry_id="1",
        unique_id=mock_id,
        version=1,
        minor_version=2,
    )
    entry.add_to_hass(hass)
    # Device carries both the legacy identifier and the MAC connection the
    # migration resolves against.
    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
        identifiers={(DOMAIN, MOCK_ID)},
        connections={
            (dr.CONNECTION_NETWORK_MAC, MOCK_MAC),
        },
    )
    await hass.async_block_till_done()
    # Pre-register an entity under the legacy unique id.
    old_entity_entry = entity_registry.async_get_or_create(
        DOMAIN, sensor_domain, old_unique_id, config_entry=entry
    )
    original_entity_id = old_entity_entry.entity_id
    hass.config_entries.async_update_entry(entry, unique_id=MOCK_MAC)
    await hass.async_block_till_done()
    # Setting up the entry runs the unique-id migration.
    await hass.config_entries.async_setup(entry.entry_id)
    await hass.async_block_till_done()
    updated_entity_entry = entity_registry.async_get(original_entity_id)
    assert entry.state is ConfigEntryState.LOADED
    assert entry.version == 2
    assert entry.minor_version == 1
    # The legacy unique id must be gone and the entity re-keyed on the MAC.
    assert (
        entity_registry.async_get_entity_id(sensor_domain, DOMAIN, old_unique_id)
        is None
    )
    assert updated_entity_entry.unique_id == new_unique_id
async def test_migrate_future_return(
    hass: HomeAssistant,
    mock_airos_client: MagicMock,
    mock_async_get_firmware_data: AsyncMock,
) -> None:
    """Test migrate entry unique id."""
    # An entry from a future (unknown) config-flow version cannot be migrated.
    future_entry = MockConfigEntry(
        domain=DOMAIN,
        source=SOURCE_USER,
        data=MOCK_CONFIG_V1_2,
        entry_id="1",
        unique_id="airos_device",
        version=3,
    )
    future_entry.add_to_hass(hass)

    await hass.config_entries.async_setup(future_entry.entry_id)
    await hass.async_block_till_done()

    assert future_entry.state is ConfigEntryState.MIGRATION_ERROR
async def test_load_unload_entry(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_airos_client: MagicMock,
    mock_async_get_firmware_data: AsyncMock,
) -> None:
    """Test setup and unload config entry."""
    entry = mock_config_entry
    entry.add_to_hass(hass)

    # Setup succeeds and the entry ends up loaded.
    await hass.config_entries.async_setup(entry.entry_id)
    await hass.async_block_till_done()
    assert entry.state is ConfigEntryState.LOADED

    # Unload succeeds and the entry ends up not loaded.
    assert await hass.config_entries.async_unload(entry.entry_id)
    await hass.async_block_till_done()
    assert entry.state is ConfigEntryState.NOT_LOADED
@pytest.mark.parametrize(
    ("exception", "state"),
    [
        (AirOSConnectionAuthenticationError, ConfigEntryState.SETUP_ERROR),
        (AirOSConnectionSetupError, ConfigEntryState.SETUP_RETRY),
        (AirOSDeviceConnectionError, ConfigEntryState.SETUP_RETRY),
        (AirOSKeyDataMissingError, ConfigEntryState.SETUP_ERROR),
        (Exception, ConfigEntryState.SETUP_ERROR),
    ],
)
async def test_setup_entry_failure(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_airos_class: MagicMock,
    mock_airos_client: MagicMock,
    mock_async_get_firmware_data: AsyncMock,
    exception: Exception,
    state: ConfigEntryState,
) -> None:
    """Test config entry setup failure."""
    entry = mock_config_entry
    # Make the firmware fetch fail with the parametrized exception.
    mock_async_get_firmware_data.side_effect = exception
    entry.add_to_hass(hass)

    setup_ok = await hass.config_entries.async_setup(entry.entry_id)

    # Setup fails and the entry lands in the matching error/retry state.
    assert setup_ok is False
    assert entry.state == state
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/airos/test_init.py",
"license": "Apache License 2.0",
"lines": 236,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/alexa_devices/test_coordinator.py | """Tests for the Alexa Devices coordinator."""
from unittest.mock import AsyncMock
from freezegun.api import FrozenDateTimeFactory
from homeassistant.components.alexa_devices.coordinator import SCAN_INTERVAL
from homeassistant.const import STATE_ON
from homeassistant.core import HomeAssistant
from . import setup_integration
from .const import TEST_DEVICE_1, TEST_DEVICE_1_SN, TEST_DEVICE_2, TEST_DEVICE_2_SN
from tests.common import MockConfigEntry, async_fire_time_changed
async def test_coordinator_stale_device(
    hass: HomeAssistant,
    freezer: FrozenDateTimeFactory,
    mock_amazon_devices_client: AsyncMock,
    mock_config_entry: MockConfigEntry,
) -> None:
    """Test coordinator data update removes stale Alexa devices."""
    first_entity = "binary_sensor.echo_test_connectivity"
    second_entity = "binary_sensor.echo_test_2_connectivity"

    mock_amazon_devices_client.get_devices_data.return_value = {
        TEST_DEVICE_1_SN: TEST_DEVICE_1,
        TEST_DEVICE_2_SN: TEST_DEVICE_2,
    }
    await setup_integration(hass, mock_config_entry)

    # Both devices are initially present and connected.
    for entity_id in (first_entity, second_entity):
        assert (state := hass.states.get(entity_id))
        assert state.state == STATE_ON

    # The second device disappears from the API response.
    mock_amazon_devices_client.get_devices_data.return_value = {
        TEST_DEVICE_1_SN: TEST_DEVICE_1,
    }
    freezer.tick(SCAN_INTERVAL)
    async_fire_time_changed(hass)
    await hass.async_block_till_done()

    assert (state := hass.states.get(first_entity))
    assert state.state == STATE_ON
    # The stale device's entity has been removed.
    assert not hass.states.get(second_entity)
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/alexa_devices/test_coordinator.py",
"license": "Apache License 2.0",
"lines": 37,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/aosmith/test_select.py | """Tests for the select platform of the A. O. Smith integration."""
from collections.abc import AsyncGenerator
from unittest.mock import MagicMock, patch
from py_aosmith.models import OperationMode
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.select import (
DOMAIN as SELECT_DOMAIN,
SERVICE_SELECT_OPTION,
)
from homeassistant.const import ATTR_ENTITY_ID, ATTR_OPTION, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from tests.common import MockConfigEntry, snapshot_platform
@pytest.fixture(autouse=True)
async def platforms() -> AsyncGenerator[None]:
    """Return the platforms to be loaded for this test."""
    # Restrict aosmith to the select platform so only select entities set up.
    with patch("homeassistant.components.aosmith.PLATFORMS", [Platform.SELECT]):
        yield
@pytest.mark.parametrize(
    ("get_devices_fixture_supports_hot_water_plus"),
    [True],
)
async def test_state(
    hass: HomeAssistant,
    init_integration: MockConfigEntry,
    snapshot: SnapshotAssertion,
    entity_registry: er.EntityRegistry,
) -> None:
    """Test the state of the select entity."""
    # Compare all entities created for the entry against the stored snapshot.
    await snapshot_platform(hass, entity_registry, snapshot, init_integration.entry_id)
@pytest.mark.parametrize(
    ("get_devices_fixture_supports_hot_water_plus"),
    [True],
)
@pytest.mark.parametrize(
    ("hass_level", "aosmith_level"),
    [
        ("off", 0),
        ("level1", 1),
        ("level2", 2),
        ("level3", 3),
    ],
)
async def test_set_hot_water_plus_level(
    hass: HomeAssistant,
    mock_client: MagicMock,
    init_integration: MockConfigEntry,
    hass_level: str,
    aosmith_level: int,
) -> None:
    """Test setting the Hot Water+ level."""
    service_data = {
        ATTR_ENTITY_ID: "select.my_water_heater_hot_water_plus_level",
        ATTR_OPTION: hass_level,
    }
    await hass.services.async_call(SELECT_DOMAIN, SERVICE_SELECT_OPTION, service_data)
    await hass.async_block_till_done()

    # The HA option string must be translated to the numeric API level.
    mock_client.update_mode.assert_called_once_with(
        junction_id="junctionId",
        mode=OperationMode.HEAT_PUMP,
        hot_water_plus_level=aosmith_level,
    )
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/aosmith/test_select.py",
"license": "Apache License 2.0",
"lines": 66,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/camera/test_prefs.py | """Test camera helper functions."""
import pytest
from homeassistant.components.camera.const import DATA_CAMERA_PREFS
from homeassistant.components.camera.prefs import (
CameraPreferences,
DynamicStreamSettings,
get_dynamic_camera_stream_settings,
)
from homeassistant.components.stream import Orientation
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError
async def test_get_dynamic_camera_stream_settings_missing_prefs(
    hass: HomeAssistant,
) -> None:
    """Test get_dynamic_camera_stream_settings when camera prefs are not set up."""
    # DATA_CAMERA_PREFS was never stored in hass.data, so the helper raises.
    with pytest.raises(HomeAssistantError, match="Camera integration not set up"):
        await get_dynamic_camera_stream_settings(hass, "camera.test")
async def test_get_dynamic_camera_stream_settings_success(hass: HomeAssistant) -> None:
    """Test successful retrieval of dynamic camera stream settings."""
    # Install freshly loaded camera preferences.
    camera_prefs = CameraPreferences(hass)
    await camera_prefs.async_load()
    hass.data[DATA_CAMERA_PREFS] = camera_prefs

    # Defaults: no transform and no preloading.
    result = await get_dynamic_camera_stream_settings(hass, "camera.test")
    assert result.orientation == Orientation.NO_TRANSFORM
    assert result.preload_stream is False
async def test_get_dynamic_camera_stream_settings_with_custom_orientation(
    hass: HomeAssistant,
) -> None:
    """Test get_dynamic_camera_stream_settings with custom orientation set."""
    # Install freshly loaded camera preferences.
    camera_prefs = CameraPreferences(hass)
    await camera_prefs.async_load()
    hass.data[DATA_CAMERA_PREFS] = camera_prefs

    # Inject the per-entity settings directly; a full entity-registry setup
    # is more than this unit test needs.
    camera_prefs._dynamic_stream_settings_by_entity_id["camera.test"] = (
        DynamicStreamSettings(
            orientation=Orientation.ROTATE_LEFT, preload_stream=False
        )
    )

    result = await get_dynamic_camera_stream_settings(hass, "camera.test")
    assert result.orientation == Orientation.ROTATE_LEFT
    assert result.preload_stream is False
async def test_get_dynamic_camera_stream_settings_with_preload_stream(
    hass: HomeAssistant,
) -> None:
    """Test get_dynamic_camera_stream_settings with preload stream enabled."""
    # Install freshly loaded camera preferences.
    camera_prefs = CameraPreferences(hass)
    await camera_prefs.async_load()
    hass.data[DATA_CAMERA_PREFS] = camera_prefs

    # Enable preloading by writing the per-entity settings directly.
    camera_prefs._dynamic_stream_settings_by_entity_id["camera.test"] = (
        DynamicStreamSettings(
            orientation=Orientation.NO_TRANSFORM, preload_stream=True
        )
    )

    result = await get_dynamic_camera_stream_settings(hass, "camera.test")
    assert result.orientation == Orientation.NO_TRANSFORM
    assert result.preload_stream is True
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/camera/test_prefs.py",
"license": "Apache License 2.0",
"lines": 61,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/compit/consts.py | """Constants for the Compit component tests."""
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
# Default user-step form input shared by the Compit config-flow tests.
CONFIG_INPUT = {
    CONF_EMAIL: "test@example.com",
    CONF_PASSWORD: "password",
}
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/compit/consts.py",
"license": "Apache License 2.0",
"lines": 6,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/compit/test_config_flow.py | """Test the Compit config flow."""
from unittest.mock import AsyncMock
import pytest
from homeassistant import config_entries
from homeassistant.components.compit.config_flow import CannotConnect, InvalidAuth
from homeassistant.components.compit.const import DOMAIN
from homeassistant.const import CONF_EMAIL, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from .consts import CONFIG_INPUT
from tests.common import MockConfigEntry
async def test_async_step_user_success(
    hass: HomeAssistant, mock_compit_api: AsyncMock, mock_setup_entry: AsyncMock
) -> None:
    """Test user step with successful authentication."""
    mock_compit_api.return_value = True

    step = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert step["type"] is FlowResultType.FORM
    assert step["step_id"] == config_entries.SOURCE_USER
    # The form shows a link to the Compit portal.
    assert step["description_placeholders"] == {
        "compit_url": "https://inext.compit.pl/"
    }

    step = await hass.config_entries.flow.async_configure(
        step["flow_id"], CONFIG_INPUT
    )

    assert step["type"] is FlowResultType.CREATE_ENTRY
    assert step["title"] == CONFIG_INPUT[CONF_EMAIL]
    assert step["data"] == CONFIG_INPUT
    assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.parametrize(
    ("exception", "expected_error"),
    [
        (InvalidAuth(), "invalid_auth"),
        (CannotConnect(), "cannot_connect"),
        (Exception(), "unknown"),
        (False, "unknown"),
    ],
)
async def test_async_step_user_failed_auth(
    hass: HomeAssistant,
    exception: Exception,
    expected_error: str,
    mock_compit_api: AsyncMock,
    mock_setup_entry: AsyncMock,
) -> None:
    """Test user step with invalid authentication then success after error is cleared."""
    # First attempt fails with the parametrized outcome, second succeeds.
    mock_compit_api.side_effect = [exception, True]

    step = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert step["type"] is FlowResultType.FORM
    assert step["step_id"] == config_entries.SOURCE_USER

    step = await hass.config_entries.flow.async_configure(
        step["flow_id"], CONFIG_INPUT
    )
    assert step["type"] is FlowResultType.FORM
    assert step["errors"] == {"base": expected_error}

    # Retrying with the same input now succeeds.
    step = await hass.config_entries.flow.async_configure(
        step["flow_id"], CONFIG_INPUT
    )
    assert step["type"] is FlowResultType.CREATE_ENTRY
    assert step["title"] == CONFIG_INPUT[CONF_EMAIL]
    assert step["data"] == CONFIG_INPUT
    assert len(mock_setup_entry.mock_calls) == 1
async def test_async_step_reauth_success(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_compit_api: AsyncMock,
    mock_setup_entry: AsyncMock,
) -> None:
    """Test reauth step with successful authentication."""
    mock_compit_api.return_value = True
    mock_config_entry.add_to_hass(hass)

    step = await mock_config_entry.start_reauth_flow(hass)
    assert step["step_id"] == "reauth_confirm"
    assert step["type"] is FlowResultType.FORM
    assert step["errors"] == {}

    step = await hass.config_entries.flow.async_configure(
        step["flow_id"], {CONF_PASSWORD: "new-password"}
    )

    assert step["type"] is FlowResultType.ABORT
    assert step["reason"] == "reauth_successful"
    # The stored credentials now carry the new password.
    assert mock_config_entry.data == {
        CONF_EMAIL: CONFIG_INPUT[CONF_EMAIL],
        CONF_PASSWORD: "new-password",
    }
    assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.parametrize(
    ("exception", "expected_error"),
    [
        (InvalidAuth(), "invalid_auth"),
        (CannotConnect(), "cannot_connect"),
        (Exception(), "unknown"),
    ],
)
async def test_async_step_reauth_confirm_failed_auth(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    exception: Exception,
    expected_error: str,
    mock_compit_api: AsyncMock,
    mock_setup_entry: AsyncMock,
) -> None:
    """Test reauth confirm step with invalid authentication then success after error is cleared."""
    # First attempt fails with the parametrized exception, second succeeds.
    mock_compit_api.side_effect = [exception, True]
    mock_config_entry.add_to_hass(hass)

    step = await mock_config_entry.start_reauth_flow(hass)
    assert step["step_id"] == "reauth_confirm"
    assert step["type"] is FlowResultType.FORM
    assert step["errors"] == {}

    step = await hass.config_entries.flow.async_configure(
        step["flow_id"], {CONF_PASSWORD: "new-password"}
    )
    assert step["type"] is FlowResultType.FORM
    assert step["errors"] == {"base": expected_error}

    # The flow recovers once authentication succeeds.
    step = await hass.config_entries.flow.async_configure(
        step["flow_id"],
        {CONF_EMAIL: CONFIG_INPUT[CONF_EMAIL], CONF_PASSWORD: "correct-password"},
    )
    assert step["type"] is FlowResultType.ABORT
    assert step["reason"] == "reauth_successful"
    assert mock_config_entry.data == {
        CONF_EMAIL: CONFIG_INPUT[CONF_EMAIL],
        CONF_PASSWORD: "correct-password",
    }
    assert len(mock_setup_entry.mock_calls) == 1
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/compit/test_config_flow.py",
"license": "Apache License 2.0",
"lines": 130,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/cync/const.py | """Test constants used in Cync tests."""
import time
import pycync
# Primary mocked pycync user; the token expires one hour from "now"
# (expires_at appears to be in milliseconds — epoch seconds * 1000).
MOCKED_USER = pycync.User(
    "test_token",
    "test_refresh_token",
    "test_authorize_string",
    123456789,
    expires_at=(time.time() * 1000) + 3600000,
)
# A second, distinct user (different tokens and id) for exercising
# unique-id mismatch scenarios.
SECOND_MOCKED_USER = pycync.User(
    "test_token_2",
    "test_refresh_token_2",
    "test_authorize_string_2",
    987654321,
    expires_at=(time.time() * 1000) + 3600000,
)
# Email address used to log the mocked users in.
MOCKED_EMAIL = "test@testuser.com"
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/cync/const.py",
"license": "Apache License 2.0",
"lines": 18,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/cync/test_config_flow.py | """Test the Cync config flow."""
from unittest.mock import ANY, AsyncMock, MagicMock
from pycync.exceptions import AuthFailedError, CyncError, TwoFactorRequiredError
import pytest
from homeassistant.components.cync.const import (
CONF_AUTHORIZE_STRING,
CONF_EXPIRES_AT,
CONF_REFRESH_TOKEN,
CONF_TWO_FACTOR_CODE,
CONF_USER_ID,
DOMAIN,
)
from homeassistant.config_entries import SOURCE_USER
from homeassistant.const import CONF_ACCESS_TOKEN, CONF_EMAIL, CONF_PASSWORD
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from .const import MOCKED_EMAIL, MOCKED_USER, SECOND_MOCKED_USER
from tests.common import MockConfigEntry
async def test_form_auth_success(
    hass: HomeAssistant, mock_setup_entry: AsyncMock
) -> None:
    """Test that an auth flow without two factor succeeds."""
    credentials = {CONF_EMAIL: MOCKED_EMAIL, CONF_PASSWORD: "test-password"}

    step = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert step["type"] is FlowResultType.FORM
    assert step["errors"] == {}
    assert step["step_id"] == "user"

    step = await hass.config_entries.flow.async_configure(
        step["flow_id"], credentials
    )

    assert step["type"] is FlowResultType.CREATE_ENTRY
    assert step["title"] == MOCKED_EMAIL
    # The tokens from the mocked login end up in the entry data.
    assert step["data"] == {
        CONF_USER_ID: MOCKED_USER.user_id,
        CONF_AUTHORIZE_STRING: "test_authorize_string",
        CONF_EXPIRES_AT: ANY,
        CONF_ACCESS_TOKEN: "test_token",
        CONF_REFRESH_TOKEN: "test_refresh_token",
    }
    assert step["result"].unique_id == str(MOCKED_USER.user_id)
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_two_factor_success(
    hass: HomeAssistant, mock_setup_entry: AsyncMock, auth_client: MagicMock
) -> None:
    """Test we handle a request for a two factor code."""
    credentials = {CONF_EMAIL: MOCKED_EMAIL, CONF_PASSWORD: "test-password"}

    step = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )

    # The first login attempt demands a two-factor code.
    auth_client.login.side_effect = TwoFactorRequiredError
    step = await hass.config_entries.flow.async_configure(
        step["flow_id"], credentials
    )
    assert step["type"] is FlowResultType.FORM
    assert step["errors"] == {}
    assert step["step_id"] == "two_factor"

    # Submitting the code completes the flow.
    auth_client.login.side_effect = None
    step = await hass.config_entries.flow.async_configure(
        step["flow_id"], {CONF_TWO_FACTOR_CODE: "123456"}
    )

    assert step["type"] is FlowResultType.CREATE_ENTRY
    assert step["title"] == MOCKED_EMAIL
    assert step["data"] == {
        CONF_USER_ID: MOCKED_USER.user_id,
        CONF_AUTHORIZE_STRING: "test_authorize_string",
        CONF_EXPIRES_AT: ANY,
        CONF_ACCESS_TOKEN: "test_token",
        CONF_REFRESH_TOKEN: "test_refresh_token",
    }
    assert step["result"].unique_id == str(MOCKED_USER.user_id)
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_reauth_success(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_setup_entry: AsyncMock,
    auth_client: MagicMock,
) -> None:
    """Test we handle re-authentication with two-factor."""
    credentials = {CONF_EMAIL: MOCKED_EMAIL, CONF_PASSWORD: "test-password"}
    mock_config_entry.add_to_hass(hass)

    step = await mock_config_entry.start_reauth_flow(hass)
    assert step["step_id"] == "reauth_confirm"

    # Login demands a two-factor code first.
    auth_client.login.side_effect = TwoFactorRequiredError
    step = await hass.config_entries.flow.async_configure(
        step["flow_id"], credentials
    )
    assert step["type"] is FlowResultType.FORM
    assert step["errors"] == {}
    assert step["step_id"] == "two_factor"

    # Submitting the code finishes the reauth.
    auth_client.login.side_effect = None
    step = await hass.config_entries.flow.async_configure(
        step["flow_id"], {CONF_TWO_FACTOR_CODE: "123456"}
    )

    assert step["type"] is FlowResultType.ABORT
    assert step["reason"] == "reauth_successful"
    assert mock_config_entry.data == {
        CONF_USER_ID: MOCKED_USER.user_id,
        CONF_AUTHORIZE_STRING: "test_authorize_string",
        CONF_EXPIRES_AT: ANY,
        CONF_ACCESS_TOKEN: "test_token",
        CONF_REFRESH_TOKEN: "test_refresh_token",
    }
    assert len(mock_setup_entry.mock_calls) == 1
async def test_form_reauth_unique_id_mismatch(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    auth_client: MagicMock,
) -> None:
    """Test we handle a unique ID mismatch when re-authenticating."""
    mock_config_entry.add_to_hass(hass)

    step = await mock_config_entry.start_reauth_flow(hass)
    assert step["step_id"] == "reauth_confirm"

    # Authenticating as a different user must abort the reauth flow.
    auth_client.user = SECOND_MOCKED_USER
    step = await hass.config_entries.flow.async_configure(
        step["flow_id"],
        {CONF_EMAIL: MOCKED_EMAIL, CONF_PASSWORD: "test-password"},
    )

    assert step["type"] is FlowResultType.ABORT
    assert step["reason"] == "unique_id_mismatch"
async def test_form_unique_id_already_exists(
    hass: HomeAssistant, mock_config_entry: MockConfigEntry
) -> None:
    """Test that setting up a config with a unique ID that already exists fails."""
    mock_config_entry.add_to_hass(hass)

    step = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert step["type"] is FlowResultType.FORM
    assert step["errors"] == {}
    assert step["step_id"] == "user"

    # Logging in as the already-configured user aborts the flow.
    step = await hass.config_entries.flow.async_configure(
        step["flow_id"],
        {CONF_EMAIL: MOCKED_EMAIL, CONF_PASSWORD: "test-password"},
    )

    assert step["type"] is FlowResultType.ABORT
    assert step["reason"] == "already_configured"
@pytest.mark.parametrize(
    ("error_type", "error_string"),
    [
        (AuthFailedError, "invalid_auth"),
        (CyncError, "cannot_connect"),
        (Exception, "unknown"),
    ],
)
async def test_form_two_factor_errors(
    hass: HomeAssistant,
    mock_setup_entry: AsyncMock,
    auth_client: MagicMock,
    error_type: Exception,
    error_string: str,
) -> None:
    """Test we handle a request for a two factor code with errors."""
    credentials = {CONF_EMAIL: MOCKED_EMAIL, CONF_PASSWORD: "test-password"}

    step = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )

    # The first login triggers the two-factor prompt.
    auth_client.login.side_effect = TwoFactorRequiredError
    step = await hass.config_entries.flow.async_configure(
        step["flow_id"], credentials
    )
    assert step["type"] is FlowResultType.FORM
    assert step["errors"] == {}
    assert step["step_id"] == "two_factor"

    # Submitting the code fails with the parametrized error and the flow
    # drops back to the user step.
    auth_client.login.side_effect = error_type
    step = await hass.config_entries.flow.async_configure(
        step["flow_id"], {CONF_TWO_FACTOR_CODE: "123456"}
    )
    assert step["type"] is FlowResultType.FORM
    assert step["errors"] == {"base": error_string}
    assert step["step_id"] == "user"

    # Finish with CREATE_ENTRY to show the flow recovers from the error.
    auth_client.login.side_effect = TwoFactorRequiredError
    step = await hass.config_entries.flow.async_configure(
        step["flow_id"], credentials
    )

    auth_client.login.side_effect = None
    step = await hass.config_entries.flow.async_configure(
        step["flow_id"], {CONF_TWO_FACTOR_CODE: "567890"}
    )

    assert step["type"] is FlowResultType.CREATE_ENTRY
    assert step["title"] == MOCKED_EMAIL
    assert step["data"] == {
        CONF_USER_ID: MOCKED_USER.user_id,
        CONF_AUTHORIZE_STRING: "test_authorize_string",
        CONF_EXPIRES_AT: ANY,
        CONF_ACCESS_TOKEN: "test_token",
        CONF_REFRESH_TOKEN: "test_refresh_token",
    }
    assert step["result"].unique_id == str(MOCKED_USER.user_id)
    assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.parametrize(
    ("error_type", "error_string"),
    [
        (AuthFailedError, "invalid_auth"),
        (CyncError, "cannot_connect"),
        (Exception, "unknown"),
    ],
)
async def test_form_errors(
    hass: HomeAssistant,
    mock_setup_entry: AsyncMock,
    auth_client: MagicMock,
    error_type: Exception,
    error_string: str,
) -> None:
    """Test we handle errors in the user step of the setup."""
    credentials = {CONF_EMAIL: MOCKED_EMAIL, CONF_PASSWORD: "test-password"}

    step = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )

    # Login fails with the parametrized error; the form is shown again.
    auth_client.login.side_effect = error_type
    step = await hass.config_entries.flow.async_configure(
        step["flow_id"], credentials
    )
    assert step["type"] is FlowResultType.FORM
    assert step["errors"] == {"base": error_string}
    assert step["step_id"] == "user"

    # Finish with CREATE_ENTRY to show the flow recovers from the error.
    auth_client.login.side_effect = None
    step = await hass.config_entries.flow.async_configure(
        step["flow_id"], credentials
    )

    assert step["type"] is FlowResultType.CREATE_ENTRY
    assert step["title"] == MOCKED_EMAIL
    assert step["data"] == {
        CONF_USER_ID: MOCKED_USER.user_id,
        CONF_AUTHORIZE_STRING: "test_authorize_string",
        CONF_EXPIRES_AT: ANY,
        CONF_ACCESS_TOKEN: "test_token",
        CONF_REFRESH_TOKEN: "test_refresh_token",
    }
    assert step["result"].unique_id == str(MOCKED_USER.user_id)
    assert len(mock_setup_entry.mock_calls) == 1
@pytest.mark.parametrize(
    ("error_type", "error_string"),
    [
        (AuthFailedError, "invalid_auth"),
        (CyncError, "cannot_connect"),
        (Exception, "unknown"),
    ],
)
async def test_form_reauth_errors(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_setup_entry: AsyncMock,
    auth_client: MagicMock,
    error_type: Exception,
    error_string: str,
) -> None:
    """Test we handle errors in the reauth flow."""
    credentials = {CONF_EMAIL: MOCKED_EMAIL, CONF_PASSWORD: "test-password"}
    mock_config_entry.add_to_hass(hass)

    step = await mock_config_entry.start_reauth_flow(hass)
    assert step["step_id"] == "reauth_confirm"

    # Login fails with the parametrized error; the confirm form is reshown.
    auth_client.login.side_effect = error_type
    step = await hass.config_entries.flow.async_configure(
        step["flow_id"], credentials
    )
    assert step["type"] is FlowResultType.FORM
    assert step["errors"] == {"base": error_string}
    assert step["step_id"] == "reauth_confirm"

    # Finish with ABORT(reauth_successful) to show the flow recovers.
    auth_client.login.side_effect = None
    step = await hass.config_entries.flow.async_configure(
        step["flow_id"], credentials
    )

    assert step["type"] is FlowResultType.ABORT
    assert step["reason"] == "reauth_successful"
    assert mock_config_entry.data == {
        CONF_USER_ID: MOCKED_USER.user_id,
        CONF_AUTHORIZE_STRING: "test_authorize_string",
        CONF_EXPIRES_AT: ANY,
        CONF_ACCESS_TOKEN: "test_token",
        CONF_REFRESH_TOKEN: "test_refresh_token",
    }
    assert len(mock_setup_entry.mock_calls) == 1
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/cync/test_config_flow.py",
"license": "Apache License 2.0",
"lines": 335,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/cync/test_light.py | """Tests for the Cync integration light platform."""
from unittest.mock import AsyncMock
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import setup_integration
from tests.common import MockConfigEntry, snapshot_platform
async def test_entities(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    snapshot: SnapshotAssertion,
    entity_registry: er.EntityRegistry,
) -> None:
    """Test that light attributes are properly set on setup."""
    await setup_integration(hass, mock_config_entry)
    # Compare all entities created for the entry against the stored snapshot.
    await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
@pytest.mark.parametrize(
    ("input_parameters", "expected_brightness", "expected_color_temp", "expected_rgb"),
    [
        ({"brightness_pct": 100, "color_temp_kelvin": 2500}, 100, 10, None),
        (
            {"brightness_pct": 100, "rgb_color": (50, 100, 150)},
            100,
            None,
            (50, 100, 150),
        ),
        ({"color_temp_kelvin": 2500}, 90, 10, None),
        ({"rgb_color": (50, 100, 150)}, 90, None, (50, 100, 150)),
    ],
)
async def test_turn_on(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    input_parameters: dict,
    expected_brightness: int | None,
    expected_color_temp: int | None,
    expected_rgb: tuple[int, int, int] | None,
) -> None:
    """Test that turning on the light changes all necessary attributes."""
    await setup_integration(hass, mock_config_entry)
    assert hass.states.get("light.lamp_bulb_1").state == "off"

    # Stub out the device call so we can inspect the translated arguments.
    bulb = mock_config_entry.runtime_data.data.get(1111)
    bulb.set_combo = AsyncMock(name="set_combo")

    await hass.services.async_call(
        "light",
        "turn_on",
        {"entity_id": "light.lamp_bulb_1"} | input_parameters,
        blocking=True,
    )

    bulb.set_combo.assert_called_once_with(
        True, expected_brightness, expected_color_temp, expected_rgb
    )
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/cync/test_light.py",
"license": "Apache License 2.0",
"lines": 56,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/deluge/test_coordinator.py | """Test Deluge coordinator.py methods."""
from homeassistant.components.deluge.const import DelugeSensorType
from homeassistant.components.deluge.coordinator import count_states
from . import GET_TORRENT_STATES_RESPONSE
def test_get_count() -> None:
    """Tests count_states()."""
    # The fixture response contains one downloading and two seeding torrents.
    counts = count_states(GET_TORRENT_STATES_RESPONSE)

    assert counts[DelugeSensorType.DOWNLOADING_COUNT_SENSOR.value] == 1
    assert counts[DelugeSensorType.SEEDING_COUNT_SENSOR.value] == 2
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/deluge/test_coordinator.py",
"license": "Apache License 2.0",
"lines": 9,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/derivative/test_diagnostics.py | """Tests for derivative diagnostics."""
from homeassistant.core import HomeAssistant
from tests.components.diagnostics import get_diagnostics_for_config_entry
from tests.typing import ClientSessionGenerator
async def test_diagnostics(
    hass: HomeAssistant, hass_client: ClientSessionGenerator, derivative_config_entry
) -> None:
    """Test diagnostics for config entry."""
    assert await hass.config_entries.async_setup(derivative_config_entry.entry_id)
    await hass.async_block_till_done()

    diagnostics = await get_diagnostics_for_config_entry(
        hass, hass_client, derivative_config_entry
    )

    # The payload covers both the config entry and its entity.
    assert isinstance(diagnostics, dict)
    assert diagnostics["config_entry"]["domain"] == "derivative"
    assert diagnostics["config_entry"]["options"]["name"] == "My derivative"
    assert diagnostics["entity"][0]["entity_id"] == "sensor.my_derivative"
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/derivative/test_diagnostics.py",
"license": "Apache License 2.0",
"lines": 17,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/droplet/test_config_flow.py | """Test Droplet config flow."""
from ipaddress import IPv4Address
from unittest.mock import AsyncMock
import pytest
from homeassistant.components.droplet.const import DOMAIN
from homeassistant.config_entries import SOURCE_USER, SOURCE_ZEROCONF
from homeassistant.const import (
ATTR_CODE,
CONF_CODE,
CONF_DEVICE_ID,
CONF_IP_ADDRESS,
CONF_PORT,
)
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers.service_info.zeroconf import ZeroconfServiceInfo
from .conftest import MOCK_CODE, MOCK_DEVICE_ID, MOCK_HOST, MOCK_PORT
from tests.common import MockConfigEntry
@pytest.mark.parametrize(
    ("pre_normalized_code", "normalized_code"),
    [
        (
            "abc 123",
            "ABC123",
        ),
        (" 123456 ", "123456"),
        ("123ABC", "123ABC"),
    ],
    ids=["alphanumeric_lower_space", "numeric_space", "alphanumeric_no_space"],
)
async def test_user_setup(
    hass: HomeAssistant,
    pre_normalized_code: str,
    normalized_code: str,
    mock_droplet_discovery: AsyncMock,
    mock_droplet_connection: AsyncMock,
    mock_droplet: AsyncMock,
) -> None:
    """Test successful Droplet user setup."""
    step = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert step is not None
    assert step.get("type") is FlowResultType.FORM
    assert step.get("step_id") == "user"

    step = await hass.config_entries.flow.async_configure(
        step["flow_id"],
        user_input={CONF_CODE: pre_normalized_code, CONF_IP_ADDRESS: "192.168.1.2"},
    )

    assert step is not None
    assert step.get("type") is FlowResultType.CREATE_ENTRY
    # The pairing code must be stored in normalized form.
    assert step.get("data") == {
        CONF_CODE: normalized_code,
        CONF_DEVICE_ID: MOCK_DEVICE_ID,
        CONF_IP_ADDRESS: MOCK_HOST,
        CONF_PORT: MOCK_PORT,
    }
    assert step.get("context") is not None
    assert step.get("context", {}).get("unique_id") == MOCK_DEVICE_ID
@pytest.mark.parametrize(
    ("device_id", "connect_res"),
    [
        (
            "",
            True,
        ),
        (MOCK_DEVICE_ID, False),
    ],
    ids=["no_device_id", "cannot_connect"],
)
async def test_user_setup_fail(
    hass: HomeAssistant,
    device_id: str,
    connect_res: bool,
    mock_droplet_discovery: AsyncMock,
    mock_droplet_connection: AsyncMock,
    mock_droplet: AsyncMock,
) -> None:
    """Test user setup failing due to no device ID or failed connection."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert result is not None
    assert result.get("type") is FlowResultType.FORM
    assert result.get("step_id") == "user"
    # Simulate the failure mode for this parametrization: either an empty
    # device ID or a refused connection — both map to "cannot_connect".
    attrs = {
        "get_device_id.return_value": device_id,
        "try_connect.return_value": connect_res,
    }
    mock_droplet_discovery.configure_mock(**attrs)
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        user_input={CONF_CODE: MOCK_CODE, CONF_IP_ADDRESS: MOCK_HOST},
    )
    assert result is not None
    assert result.get("type") is FlowResultType.FORM
    assert result.get("errors") == {"base": "cannot_connect"}
    # The user should be able to try again. Maybe the droplet was disconnected from the network or something
    attrs = {
        "get_device_id.return_value": MOCK_DEVICE_ID,
        "try_connect.return_value": True,
    }
    mock_droplet_discovery.configure_mock(**attrs)
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"],
        user_input={CONF_CODE: MOCK_CODE, CONF_IP_ADDRESS: MOCK_HOST},
    )
    assert result is not None
    assert result.get("type") is FlowResultType.CREATE_ENTRY
async def test_user_setup_already_configured(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_droplet_discovery: AsyncMock,
    mock_droplet: AsyncMock,
    mock_droplet_connection: AsyncMock,
) -> None:
    """Test that user setup aborts when the device is already configured."""
    mock_config_entry.add_to_hass(hass)

    # The user step still shows its form even for a known device.
    form = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": SOURCE_USER}
    )
    assert form is not None
    assert form.get("step_id") == "user"
    assert form.get("type") is FlowResultType.FORM

    # Submitting the same device must abort instead of creating a duplicate.
    outcome = await hass.config_entries.flow.async_configure(
        form["flow_id"],
        user_input={CONF_CODE: MOCK_CODE, CONF_IP_ADDRESS: MOCK_HOST},
    )
    assert outcome is not None
    assert outcome.get("reason") == "already_configured"
    assert outcome.get("type") is FlowResultType.ABORT
@pytest.mark.parametrize(
    ("pre_normalized_code", "normalized_code"),
    [
        (
            "abc 123",
            "ABC123",
        ),
        (" 123456 ", "123456"),
        ("123ABC", "123ABC"),
    ],
    ids=["alphanumeric_lower_space", "numeric_space", "alphanumeric_no_space"],
)
async def test_zeroconf_setup(
    hass: HomeAssistant,
    pre_normalized_code: str,
    normalized_code: str,
    mock_droplet_discovery: AsyncMock,
    mock_droplet: AsyncMock,
    mock_droplet_connection: AsyncMock,
) -> None:
    """Test successful setup of Droplet via zeroconf."""
    # Valid zeroconf advertisement for the mocked device.
    discovery_info = ZeroconfServiceInfo(
        ip_address=IPv4Address(MOCK_HOST),
        ip_addresses=[IPv4Address(MOCK_HOST)],
        port=MOCK_PORT,
        hostname=MOCK_DEVICE_ID,
        type="_droplet._tcp.local.",
        name=MOCK_DEVICE_ID,
        properties={},
    )
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_ZEROCONF},
        data=discovery_info,
    )
    assert result is not None
    assert result.get("type") is FlowResultType.FORM
    assert result.get("step_id") == "confirm"
    # Only the pairing code is asked for; host/port come from discovery.
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input={CONF_CODE: pre_normalized_code}
    )
    assert result is not None
    assert result.get("type") is FlowResultType.CREATE_ENTRY
    # The code is stored in normalized form (stripped, uppercased).
    assert result.get("data") == {
        CONF_DEVICE_ID: MOCK_DEVICE_ID,
        CONF_IP_ADDRESS: MOCK_HOST,
        CONF_PORT: MOCK_PORT,
        CONF_CODE: normalized_code,
    }
    assert result.get("context") is not None
    assert result.get("context", {}).get("unique_id") == MOCK_DEVICE_ID
@pytest.mark.parametrize("mock_droplet_discovery", ["192.168.1.5"], indirect=True)
async def test_zeroconf_update(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_droplet_discovery: AsyncMock,
) -> None:
    """Test updating Droplet's host with zeroconf."""
    mock_config_entry.add_to_hass(hass)
    # We start with a different host
    new_host = "192.168.1.5"
    assert mock_config_entry.data[CONF_IP_ADDRESS] != new_host
    # After this discovery message, host should be updated
    discovery_info = ZeroconfServiceInfo(
        ip_address=IPv4Address(new_host),
        ip_addresses=[IPv4Address(new_host)],
        port=MOCK_PORT,
        hostname=MOCK_DEVICE_ID,
        type="_droplet._tcp.local.",
        name=MOCK_DEVICE_ID,
        properties={},
    )
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_ZEROCONF},
        data=discovery_info,
    )
    assert result is not None
    # The flow aborts as already-configured, but the side effect is that
    # the existing entry's IP address is refreshed to the advertised one.
    assert result.get("type") is FlowResultType.ABORT
    assert result.get("reason") == "already_configured"
    assert mock_config_entry.data[CONF_IP_ADDRESS] == new_host
async def test_zeroconf_invalid_discovery(hass: HomeAssistant) -> None:
    """Test that invalid discovery information causes the config flow to abort."""
    flow_result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_ZEROCONF},
        data=ZeroconfServiceInfo(
            ip_address=IPv4Address(MOCK_HOST),
            ip_addresses=[IPv4Address(MOCK_HOST)],
            port=-1,  # negative port makes the discovery info invalid
            hostname=MOCK_DEVICE_ID,
            type="_droplet._tcp.local.",
            name=MOCK_DEVICE_ID,
            properties={},
        ),
    )
    assert flow_result is not None
    assert flow_result.get("reason") == "invalid_discovery_info"
    assert flow_result.get("type") is FlowResultType.ABORT
async def test_confirm_cannot_connect(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_droplet: AsyncMock,
    mock_droplet_connection: AsyncMock,
    mock_droplet_discovery: AsyncMock,
) -> None:
    """Test that config flow fails when Droplet can't connect."""
    discovery_info = ZeroconfServiceInfo(
        ip_address=IPv4Address(MOCK_HOST),
        ip_addresses=[IPv4Address(MOCK_HOST)],
        port=MOCK_PORT,
        hostname=MOCK_DEVICE_ID,
        type="_droplet._tcp.local.",
        name=MOCK_DEVICE_ID,
        properties={},
    )
    result = await hass.config_entries.flow.async_init(
        DOMAIN,
        context={"source": SOURCE_ZEROCONF},
        data=discovery_info,
    )
    assert result.get("type") is FlowResultType.FORM
    # Mock the connection failing
    mock_droplet_discovery.try_connect.return_value = False
    # NOTE(review): this test uses ATTR_CODE where the rest of the file uses
    # CONF_CODE — presumably both resolve to the same "code" key; confirm.
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], {ATTR_CODE: MOCK_CODE}
    )
    assert result.get("type") is FlowResultType.FORM
    assert result.get("errors")["base"] == "cannot_connect"
    # Once the connection succeeds, retrying the same flow must finish.
    mock_droplet_discovery.try_connect.return_value = True
    result = await hass.config_entries.flow.async_configure(
        result["flow_id"], user_input={ATTR_CODE: MOCK_CODE}
    )
    assert result.get("type") is FlowResultType.CREATE_ENTRY, result
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/droplet/test_config_flow.py",
"license": "Apache License 2.0",
"lines": 266,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/droplet/test_init.py | """Test Droplet initialization."""
from unittest.mock import AsyncMock
import pytest
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
from . import setup_integration
from tests.common import MockConfigEntry
async def test_setup_no_version_info(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_droplet_discovery: AsyncMock,
    mock_droplet_connection: AsyncMock,
    mock_droplet: AsyncMock,
    caplog: pytest.LogCaptureFixture,
) -> None:
    """Test coordinator setup where Droplet never sends version info."""
    # Device reports that version info never became available; setup should
    # still proceed but log the failure.
    mock_droplet.version_info_available.return_value = False
    await setup_integration(hass, mock_config_entry)
    assert "Failed to get version info from Droplet" in caplog.text
async def test_setup_droplet_offline(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_droplet_discovery: AsyncMock,
    mock_droplet_connection: AsyncMock,
    mock_droplet: AsyncMock,
) -> None:
    """Test integration setup when Droplet is offline."""
    # An unreachable device should put the entry into retry, not failure.
    mock_droplet.connected = False
    await setup_integration(hass, mock_config_entry)
    assert mock_config_entry.state is ConfigEntryState.SETUP_RETRY
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/droplet/test_init.py",
"license": "Apache License 2.0",
"lines": 30,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/droplet/test_sensor.py | """Test Droplet sensors."""
from unittest.mock import AsyncMock
from syrupy.assertion import SnapshotAssertion
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from . import setup_integration
from tests.common import MockConfigEntry, snapshot_platform
async def test_sensors(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_droplet_discovery: AsyncMock,
    mock_droplet_connection: AsyncMock,
    mock_droplet: AsyncMock,
    snapshot: SnapshotAssertion,
    entity_registry: er.EntityRegistry,
) -> None:
    """Test Droplet sensors."""
    # Snapshot-compare every entity the platform creates for this entry.
    await setup_integration(hass, mock_config_entry)
    await snapshot_platform(hass, entity_registry, snapshot, mock_config_entry.entry_id)
async def test_sensors_update_data(
    hass: HomeAssistant,
    mock_config_entry: MockConfigEntry,
    mock_droplet_discovery: AsyncMock,
    mock_droplet_connection: AsyncMock,
    mock_droplet: AsyncMock,
) -> None:
    """Test Droplet async update data."""
    await setup_integration(hass, mock_config_entry)
    # Initial state: 0.1 L/min rendered in gallons/min (0.0264... gal/min).
    assert hass.states.get("sensor.mock_title_flow_rate").state == "0.0264172052358148"
    mock_droplet.get_flow_rate.return_value = 0.5
    # Invoke the callback that was passed to listen_forever (its second
    # positional argument) to simulate a push update from the device.
    mock_droplet.listen_forever.call_args_list[0][0][1]({})
    assert hass.states.get("sensor.mock_title_flow_rate").state == "0.132086026179074"
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/droplet/test_sensor.py",
"license": "Apache License 2.0",
"lines": 32,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/ekeybionyx/test_config_flow.py | """Test the ekey bionyx config flow."""
from unittest.mock import patch
import pytest
from homeassistant import config_entries
from homeassistant.components.application_credentials import (
DOMAIN as APPLICATION_CREDENTIALS_DOMAIN,
ClientCredential,
async_import_client_credential,
)
from homeassistant.components.ekeybionyx.const import (
DOMAIN,
OAUTH2_AUTHORIZE,
OAUTH2_TOKEN,
SCOPE,
)
from homeassistant.core import HomeAssistant
from homeassistant.data_entry_flow import FlowResultType
from homeassistant.helpers import config_entry_oauth2_flow
from homeassistant.setup import async_setup_component
from .conftest import dummy_systems
from tests.test_util.aiohttp import AiohttpClientMocker
from tests.typing import ClientSessionGenerator
# OAuth application credentials used by every flow test in this module.
CLIENT_ID = "1234"
CLIENT_SECRET = "5678"


@pytest.fixture
async def setup_credentials(hass: HomeAssistant) -> None:
    """Fixture to setup credentials."""
    # Register the application_credentials integration and import the
    # test client credential so the OAuth flow can resolve it.
    assert await async_setup_component(hass, APPLICATION_CREDENTIALS_DOMAIN, {})
    await async_import_client_credential(
        hass,
        DOMAIN,
        ClientCredential(CLIENT_ID, CLIENT_SECRET),
    )
@pytest.mark.usefixtures("current_request_with_host")
async def test_full_flow(
    hass: HomeAssistant,
    hass_client_no_auth: ClientSessionGenerator,
    aioclient_mock: AiohttpClientMocker,
    setup_credentials: None,
    webhook_id: None,
    system: None,
    token_hex: None,
) -> None:
    """Check full flow."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    # Encode the expected OAuth state token the same way the flow does.
    state = config_entry_oauth2_flow._encode_jwt(
        hass,
        {
            "flow_id": result["flow_id"],
            "redirect_uri": "https://example.com/auth/external/callback",
        },
    )
    assert result["url"] == (
        f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}"
        "&redirect_uri=https://example.com/auth/external/callback"
        f"&state={state}"
        f"&scope={SCOPE}"
    )
    # Simulate the external OAuth provider redirecting back with a code.
    client = await hass_client_no_auth()
    resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
    assert resp.status == 200
    assert resp.headers["content-type"] == "text/html; charset=utf-8"
    aioclient_mock.post(
        OAUTH2_TOKEN,
        json={
            "refresh_token": "mock-refresh-token",
            "access_token": "mock-access-token",
            "type": "Bearer",
            "expires_in": 60,
        },
    )
    # Token exchange done: flow advances to system selection.
    flow = await hass.config_entries.flow.async_configure(result["flow_id"])
    assert flow.get("step_id") == "choose_system"
    flow2 = await hass.config_entries.flow.async_configure(
        flow["flow_id"], {"system": "946DA01F-9ABD-4D9D-80C7-02AF85C822A8"}
    )
    assert flow2.get("step_id") == "webhooks"
    # No webhook names and a schemeless URL: both validation errors reported.
    flow3 = await hass.config_entries.flow.async_configure(
        flow2["flow_id"],
        {
            "url": "localhost:8123",
        },
    )
    assert flow3.get("errors") == {"base": "no_webhooks_provided", "url": "invalid_url"}
    # Every invalid webhook name is flagged individually.
    flow4 = await hass.config_entries.flow.async_configure(
        flow3["flow_id"],
        {
            "webhook1": "Test ",
            "webhook2": " Invalid",
            "webhook3": "1Invalid",
            "webhook4": "Also@Invalid",
            "webhook5": "Invalid-Name",
            "url": "localhost:8123",
        },
    )
    assert flow4.get("errors") == {
        "url": "invalid_url",
        "webhook1": "invalid_name",
        "webhook2": "invalid_name",
        "webhook3": "invalid_name",
        "webhook4": "invalid_name",
        "webhook5": "invalid_name",
    }
    with patch(
        "homeassistant.components.ekeybionyx.async_setup_entry", return_value=True
    ) as mock_setup:
        # Valid input on the same flow finally creates the entry.
        flow5 = await hass.config_entries.flow.async_configure(
            flow2["flow_id"],
            {
                "webhook1": "Test",
                "url": "http://localhost:8123",
            },
        )
    assert len(hass.config_entries.async_entries(DOMAIN)) == 1
    assert hass.config_entries.async_entries(DOMAIN)[0].data == {
        "webhooks": [
            {
                "webhook_id": "1234567890",
                "name": "Test",
                "auth": "f2156edca7fc6871e13845314a6fc68622e5ad7c58f17663a487ed28cac247f7",
                "ekey_id": "946DA01F-9ABD-4D9D-80C7-02AF85C822A8",
            }
        ]
    }
    assert flow5.get("type") is FlowResultType.CREATE_ENTRY
    assert len(mock_setup.mock_calls) == 1
@pytest.mark.usefixtures("current_request_with_host")
async def test_no_own_system(
    hass: HomeAssistant,
    hass_client_no_auth: ClientSessionGenerator,
    aioclient_mock: AiohttpClientMocker,
    setup_credentials: None,
    no_own_system: None,
) -> None:
    """Check no own System flow."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    state = config_entry_oauth2_flow._encode_jwt(
        hass,
        {
            "flow_id": result["flow_id"],
            "redirect_uri": "https://example.com/auth/external/callback",
        },
    )
    assert result["url"] == (
        f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}"
        "&redirect_uri=https://example.com/auth/external/callback"
        f"&state={state}"
        f"&scope={SCOPE}"
    )
    client = await hass_client_no_auth()
    resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
    assert resp.status == 200
    assert resp.headers["content-type"] == "text/html; charset=utf-8"
    aioclient_mock.post(
        OAUTH2_TOKEN,
        json={
            "refresh_token": "mock-refresh-token",
            "access_token": "mock-access-token",
            "type": "Bearer",
            "expires_in": 60,
        },
    )
    # With no owned systems (no_own_system fixture) the flow must abort
    # without creating an entry.
    flow = await hass.config_entries.flow.async_configure(result["flow_id"])
    assert len(hass.config_entries.async_entries(DOMAIN)) == 0
    assert flow.get("type") is FlowResultType.ABORT
    assert flow.get("reason") == "no_own_systems"
@pytest.mark.usefixtures("current_request_with_host")
async def test_no_available_webhooks(
    hass: HomeAssistant,
    hass_client_no_auth: ClientSessionGenerator,
    aioclient_mock: AiohttpClientMocker,
    setup_credentials: None,
    no_available_webhooks: None,
) -> None:
    """Check the flow aborts when the system has no available webhooks."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    state = config_entry_oauth2_flow._encode_jwt(
        hass,
        {
            "flow_id": result["flow_id"],
            "redirect_uri": "https://example.com/auth/external/callback",
        },
    )
    assert result["url"] == (
        f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}"
        "&redirect_uri=https://example.com/auth/external/callback"
        f"&state={state}"
        f"&scope={SCOPE}"
    )
    client = await hass_client_no_auth()
    resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
    assert resp.status == 200
    assert resp.headers["content-type"] == "text/html; charset=utf-8"
    aioclient_mock.post(
        OAUTH2_TOKEN,
        json={
            "refresh_token": "mock-refresh-token",
            "access_token": "mock-access-token",
            "type": "Bearer",
            "expires_in": 60,
        },
    )
    # The no_available_webhooks fixture leaves the system without free
    # webhook slots, so the flow aborts without creating an entry.
    flow = await hass.config_entries.flow.async_configure(result["flow_id"])
    assert len(hass.config_entries.async_entries(DOMAIN)) == 0
    assert flow.get("type") is FlowResultType.ABORT
    assert flow.get("reason") == "no_available_webhooks"
@pytest.mark.usefixtures("current_request_with_host")
async def test_cleanup(
    hass: HomeAssistant,
    hass_client_no_auth: ClientSessionGenerator,
    aioclient_mock: AiohttpClientMocker,
    setup_credentials: None,
    already_set_up: None,
    webhooks: None,
    webhook_deletion: None,
) -> None:
    """Check that stale webhooks are deleted before continuing the flow."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    state = config_entry_oauth2_flow._encode_jwt(
        hass,
        {
            "flow_id": result["flow_id"],
            "redirect_uri": "https://example.com/auth/external/callback",
        },
    )
    assert result["url"] == (
        f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}"
        "&redirect_uri=https://example.com/auth/external/callback"
        f"&state={state}"
        f"&scope={SCOPE}"
    )
    client = await hass_client_no_auth()
    resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
    assert resp.status == 200
    assert resp.headers["content-type"] == "text/html; charset=utf-8"
    aioclient_mock.post(
        OAUTH2_TOKEN,
        json={
            "refresh_token": "mock-refresh-token",
            "access_token": "mock-access-token",
            "type": "Bearer",
            "expires_in": 60,
        },
    )
    # Existing webhooks (webhooks fixture) route the flow to the deletion step.
    flow = await hass.config_entries.flow.async_configure(result["flow_id"])
    assert flow.get("step_id") == "delete_webhooks"
    flow2 = await hass.config_entries.flow.async_configure(flow["flow_id"], {})
    assert flow2.get("type") is FlowResultType.SHOW_PROGRESS
    # Re-mock the systems endpoint so the post-deletion poll sees a clean system.
    aioclient_mock.clear_requests()
    aioclient_mock.get(
        "https://api.bionyx.io/3rd-party/api/systems",
        json=dummy_systems(1, 1, 0),
    )
    await hass.async_block_till_done()
    # Once deletion finishes, the flow proceeds to the webhooks step.
    assert (
        hass.config_entries.flow.async_get(flow2["flow_id"]).get("step_id")
        == "webhooks"
    )
@pytest.mark.usefixtures("current_request_with_host")
async def test_error_on_setup(
    hass: HomeAssistant,
    hass_client_no_auth: ClientSessionGenerator,
    aioclient_mock: AiohttpClientMocker,
    setup_credentials: None,
    no_response: None,
) -> None:
    """Check the flow aborts with cannot_connect when the API does not respond."""
    result = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    state = config_entry_oauth2_flow._encode_jwt(
        hass,
        {
            "flow_id": result["flow_id"],
            "redirect_uri": "https://example.com/auth/external/callback",
        },
    )
    assert result["url"] == (
        f"{OAUTH2_AUTHORIZE}?response_type=code&client_id={CLIENT_ID}"
        "&redirect_uri=https://example.com/auth/external/callback"
        f"&state={state}"
        f"&scope={SCOPE}"
    )
    client = await hass_client_no_auth()
    resp = await client.get(f"/auth/external/callback?code=abcd&state={state}")
    assert resp.status == 200
    assert resp.headers["content-type"] == "text/html; charset=utf-8"
    aioclient_mock.post(
        OAUTH2_TOKEN,
        json={
            "refresh_token": "mock-refresh-token",
            "access_token": "mock-access-token",
            "type": "Bearer",
            "expires_in": 60,
        },
    )
    # The no_response fixture makes the API unreachable: abort, no entry.
    flow = await hass.config_entries.flow.async_configure(result["flow_id"])
    assert len(hass.config_entries.async_entries(DOMAIN)) == 0
    assert flow.get("type") is FlowResultType.ABORT
    assert flow.get("reason") == "cannot_connect"
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/ekeybionyx/test_config_flow.py",
"license": "Apache License 2.0",
"lines": 308,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/ekeybionyx/test_init.py | """Module contains tests for the ekeybionyx component's initialization.
Functions:
test_async_setup_entry(hass: HomeAssistant, config_entry: MockConfigEntry) -> None:
Test a successful setup entry and unload of entry.
"""
from homeassistant.components.ekeybionyx.const import DOMAIN
from homeassistant.config_entries import ConfigEntryState
from homeassistant.core import HomeAssistant
from tests.common import MockConfigEntry
async def test_async_setup_entry(
    hass: HomeAssistant, config_entry: MockConfigEntry
) -> None:
    """Verify the entry sets up into LOADED and unloads into NOT_LOADED."""
    config_entry.add_to_hass(hass)

    # Setting up the entry must leave exactly one loaded entry.
    await hass.config_entries.async_setup(config_entry.entry_id)
    await hass.async_block_till_done()
    assert config_entry.state is ConfigEntryState.LOADED
    assert len(hass.config_entries.async_entries(DOMAIN)) == 1

    # Unloading must report success and transition the entry state.
    assert await hass.config_entries.async_unload(config_entry.entry_id)
    await hass.async_block_till_done()
    assert config_entry.state is ConfigEntryState.NOT_LOADED
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/ekeybionyx/test_init.py",
"license": "Apache License 2.0",
"lines": 21,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/esphome/test_analytics.py | """Tests for analytics platform."""
from homeassistant.components.analytics import async_devices_payload
from homeassistant.components.esphome import DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.helpers import device_registry as dr
from homeassistant.setup import async_setup_component
from tests.common import MockConfigEntry
async def test_analytics(
    hass: HomeAssistant, device_registry: dr.DeviceRegistry
) -> None:
    """Verify ESPHome devices are excluded from the analytics payload."""
    await async_setup_component(hass, "analytics", {})

    entry = MockConfigEntry(domain=DOMAIN, data={})
    entry.add_to_hass(hass)

    # Register one ESPHome device so there is something that could be reported.
    device_registry.async_get_or_create(
        config_entry_id=entry.entry_id,
        connections={(dr.CONNECTION_NETWORK_MAC, "12:34:56:AB:CD:EF")},
        identifiers={(DOMAIN, "test")},
        manufacturer="Test Manufacturer",
    )

    payload = await async_devices_payload(hass)
    assert DOMAIN not in payload["integrations"]
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/esphome/test_analytics.py",
"license": "Apache License 2.0",
"lines": 22,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/file/test_services.py | """The tests for the notify file platform."""
from unittest.mock import MagicMock
import pytest
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.file import DOMAIN
from homeassistant.components.file.services import (
ATTR_FILE_ENCODING,
ATTR_FILE_NAME,
SERVICE_READ_FILE,
)
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import HomeAssistantError, ServiceValidationError
@pytest.mark.parametrize(
    ("file_name", "file_encoding"),
    [
        ("tests/components/file/fixtures/file_read.json", "json"),
        ("tests/components/file/fixtures/file_read.yaml", "yaml"),
        ("tests/components/file/fixtures/file_read_list.yaml", "yaml"),
    ],
)
async def test_read_file(
    hass: HomeAssistant,
    mock_is_allowed_path: MagicMock,
    setup_ha_file_integration,
    file_name: str,
    file_encoding: str,
    snapshot: SnapshotAssertion,
) -> None:
    """Test reading files in supported formats."""
    # Service returns the decoded file content as the service response,
    # which is snapshot-compared per parametrized fixture file.
    result = await hass.services.async_call(
        DOMAIN,
        SERVICE_READ_FILE,
        {
            ATTR_FILE_NAME: file_name,
            ATTR_FILE_ENCODING: file_encoding,
        },
        blocking=True,
        return_response=True,
    )
    assert result == snapshot
async def test_read_file_disallowed_path(
    hass: HomeAssistant,
    setup_ha_file_integration,
) -> None:
    """Test reading in a disallowed path generates error."""
    # Note: mock_is_allowed_path is intentionally NOT used here, so the
    # real allowlist check rejects the path.
    file_name = "tests/components/file/fixtures/file_read.json"
    with pytest.raises(ServiceValidationError) as sve:
        await hass.services.async_call(
            DOMAIN,
            SERVICE_READ_FILE,
            {
                ATTR_FILE_NAME: file_name,
                ATTR_FILE_ENCODING: "json",
            },
            blocking=True,
            return_response=True,
        )
    assert file_name in str(sve.value)
    assert sve.value.translation_key == "no_access_to_path"
    assert sve.value.translation_domain == DOMAIN
async def test_read_file_bad_encoding_option(
    hass: HomeAssistant,
    mock_is_allowed_path: MagicMock,
    setup_ha_file_integration,
) -> None:
    """Test handling error if an invalid encoding is specified."""
    file_name = "tests/components/file/fixtures/file_read.json"
    # An unsupported encoding value must fail validation, and the error
    # message must mention both the file and the bad encoding.
    with pytest.raises(ServiceValidationError) as sve:
        await hass.services.async_call(
            DOMAIN,
            SERVICE_READ_FILE,
            {
                ATTR_FILE_NAME: file_name,
                ATTR_FILE_ENCODING: "invalid",
            },
            blocking=True,
            return_response=True,
        )
    assert file_name in str(sve.value)
    assert "invalid" in str(sve.value)
    assert sve.value.translation_key == "unsupported_file_encoding"
    assert sve.value.translation_domain == DOMAIN
@pytest.mark.parametrize(
    ("file_name", "file_encoding"),
    [
        ("tests/components/file/fixtures/file_read.not_json", "json"),
        ("tests/components/file/fixtures/file_read.not_yaml", "yaml"),
    ],
)
async def test_read_file_decoding_error(
    hass: HomeAssistant,
    mock_is_allowed_path: MagicMock,
    setup_ha_file_integration,
    file_name: str,
    file_encoding: str,
) -> None:
    """Test decoding errors are handled correctly."""
    # The fixture files contain content that is invalid for the declared
    # encoding, so the service must raise a file_decoding error.
    with pytest.raises(HomeAssistantError) as hae:
        await hass.services.async_call(
            DOMAIN,
            SERVICE_READ_FILE,
            {
                ATTR_FILE_NAME: file_name,
                ATTR_FILE_ENCODING: file_encoding,
            },
            blocking=True,
            return_response=True,
        )
    assert file_name in str(hae.value)
    assert file_encoding in str(hae.value)
    assert hae.value.translation_key == "file_decoding"
    assert hae.value.translation_domain == DOMAIN
async def test_read_file_dne(
    hass: HomeAssistant,
    mock_is_allowed_path: MagicMock,
    setup_ha_file_integration,
) -> None:
    """Test handling error if file does not exist."""
    # Path is allowed (mock_is_allowed_path) but the file is missing.
    file_name = "tests/components/file/fixtures/file_dne.yaml"
    with pytest.raises(HomeAssistantError) as hae:
        _ = await hass.services.async_call(
            DOMAIN,
            SERVICE_READ_FILE,
            {
                ATTR_FILE_NAME: file_name,
                ATTR_FILE_ENCODING: "yaml",
            },
            blocking=True,
            return_response=True,
        )
    assert file_name in str(hae.value)
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/file/test_services.py",
"license": "Apache License 2.0",
"lines": 131,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/foscam/test_number.py | """Test the Foscam number platform."""
from unittest.mock import patch
from syrupy.assertion import SnapshotAssertion
from homeassistant.components.foscam.const import DOMAIN
from homeassistant.components.number import (
ATTR_VALUE,
DOMAIN as NUMBER_DOMAIN,
SERVICE_SET_VALUE,
)
from homeassistant.const import ATTR_ENTITY_ID, Platform
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from .conftest import setup_mock_foscam_camera
from .const import ENTRY_ID, VALID_CONFIG
from tests.common import MockConfigEntry, snapshot_platform
async def test_number_entities(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
    snapshot: SnapshotAssertion,
) -> None:
    """Test creation of number entities."""
    entry = MockConfigEntry(domain=DOMAIN, data=VALID_CONFIG, entry_id=ENTRY_ID)
    entry.add_to_hass(hass)
    with (
        # Mock a valid camera instance
        patch("homeassistant.components.foscam.FoscamCamera") as mock_foscam_camera,
        # Limit setup to the number platform so the snapshot stays focused.
        patch("homeassistant.components.foscam.PLATFORMS", [Platform.NUMBER]),
    ):
        setup_mock_foscam_camera(mock_foscam_camera)
        assert await hass.config_entries.async_setup(entry.entry_id)
        await snapshot_platform(hass, entity_registry, snapshot, entry.entry_id)
async def test_setting_number(hass: HomeAssistant) -> None:
    """Test setting a number entity calls the correct method on the camera."""
    entry = MockConfigEntry(domain=DOMAIN, data=VALID_CONFIG, entry_id=ENTRY_ID)
    entry.add_to_hass(hass)
    with patch("homeassistant.components.foscam.FoscamCamera") as mock_foscam_camera:
        setup_mock_foscam_camera(mock_foscam_camera)
        assert await hass.config_entries.async_setup(entry.entry_id)
        await hass.async_block_till_done()
        # Setting the volume number entity must be forwarded to the
        # camera API with the raw value.
        await hass.services.async_call(
            NUMBER_DOMAIN,
            SERVICE_SET_VALUE,
            {
                ATTR_ENTITY_ID: "number.mock_title_device_volume",
                ATTR_VALUE: 42,
            },
            blocking=True,
        )
        mock_foscam_camera.setAudioVolume.assert_called_once_with(42)
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/foscam/test_number.py",
"license": "Apache License 2.0",
"lines": 49,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/hassio/test_switch.py | """The tests for the hassio switch."""
from collections.abc import AsyncGenerator
import os
from unittest.mock import AsyncMock, patch
import pytest
from homeassistant.components.hassio import DOMAIN
from homeassistant.core import HomeAssistant
from homeassistant.helpers import entity_registry as er
from homeassistant.setup import async_setup_component
from .common import MOCK_REPOSITORIES, MOCK_STORE_ADDONS
from tests.common import MockConfigEntry
from tests.test_util.aiohttp import AiohttpClientMocker
# Environment variables the hassio integration expects when running
# under the Supervisor.
MOCK_ENVIRON = {"SUPERVISOR": "127.0.0.1", "SUPERVISOR_TOKEN": "abcdefgh"}


@pytest.fixture
async def setup_integration(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
) -> AsyncGenerator[MockConfigEntry]:
    """Set up the hassio integration and enable entity."""
    config_entry = MockConfigEntry(domain=DOMAIN, data={}, unique_id=DOMAIN)
    config_entry.add_to_hass(hass)
    # Patch the Supervisor env vars for the duration of the setup.
    with patch.dict(os.environ, MOCK_ENVIRON):
        result = await async_setup_component(
            hass,
            "hassio",
            {"http": {"server_port": 9999, "server_host": "127.0.0.1"}, "hassio": {}},
        )
        assert result
        await hass.async_block_till_done()
    yield config_entry
async def enable_entity(
    hass: HomeAssistant,
    entity_registry: er.EntityRegistry,
    config_entry: MockConfigEntry,
    entity_id: str,
) -> None:
    """Enable an entity and reload the config entry."""
    # Clearing disabled_by only takes effect after the entry is reloaded.
    entity_registry.async_update_entity(entity_id, disabled_by=None)
    await hass.config_entries.async_reload(config_entry.entry_id)
    await hass.async_block_till_done()
@pytest.fixture(autouse=True)
def mock_all(
    aioclient_mock: AiohttpClientMocker,
    addon_installed: AsyncMock,
    store_info: AsyncMock,
    addon_changelog: AsyncMock,
    addon_stats: AsyncMock,
    resolution_info: AsyncMock,
    jobs_info: AsyncMock,
) -> None:
    """Mock all setup requests."""
    # Options endpoints accept any payload.
    aioclient_mock.post("http://127.0.0.1/homeassistant/options", json={"result": "ok"})
    aioclient_mock.post("http://127.0.0.1/supervisor/options", json={"result": "ok"})
    # Basic Supervisor / host / core version info.
    aioclient_mock.get(
        "http://127.0.0.1/info",
        json={
            "result": "ok",
            "data": {
                "supervisor": "222",
                "homeassistant": "0.110.0",
                "hassos": "1.2.3",
            },
        },
    )
    aioclient_mock.get(
        "http://127.0.0.1/host/info",
        json={
            "result": "ok",
            "data": {
                "result": "ok",
                "data": {
                    "chassis": "vm",
                    "operating_system": "Debian GNU/Linux 10 (buster)",
                    "kernel": "4.19.0-6-amd64",
                },
            },
        },
    )
    aioclient_mock.get(
        "http://127.0.0.1/core/info",
        json={"result": "ok", "data": {"version_latest": "1.0.0", "version": "1.0.0"}},
    )
    aioclient_mock.get(
        "http://127.0.0.1/os/info",
        json={
            "result": "ok",
            "data": {
                "version_latest": "1.0.0",
                "version": "1.0.0",
                "update_available": False,
            },
        },
    )
    # Supervisor info advertises two addons: "test" (started, update
    # available) and "test-two" (stopped) — these back the switch entities.
    aioclient_mock.get(
        "http://127.0.0.1/supervisor/info",
        json={
            "result": "ok",
            "data": {
                "result": "ok",
                "version": "1.0.0",
                "version_latest": "1.0.0",
                "auto_update": True,
                "addons": [
                    {
                        "name": "test",
                        "state": "started",
                        "slug": "test",
                        "installed": True,
                        "update_available": True,
                        "icon": False,
                        "version": "2.0.0",
                        "version_latest": "2.0.1",
                        "repository": "core",
                        "url": "https://github.com/home-assistant/addons/test",
                    },
                    {
                        "name": "test-two",
                        "state": "stopped",
                        "slug": "test-two",
                        "installed": True,
                        "update_available": False,
                        "icon": True,
                        "version": "3.1.0",
                        "version_latest": "3.1.0",
                        "repository": "core",
                        "url": "https://github.com",
                    },
                ],
            },
        },
    )
    # Resource statistics for core and supervisor.
    aioclient_mock.get(
        "http://127.0.0.1/core/stats",
        json={
            "result": "ok",
            "data": {
                "cpu_percent": 0.99,
                "memory_usage": 182611968,
                "memory_limit": 3977146368,
                "memory_percent": 4.59,
                "network_rx": 362570232,
                "network_tx": 82374138,
                "blk_read": 46010945536,
                "blk_write": 15051526144,
            },
        },
    )
    aioclient_mock.get(
        "http://127.0.0.1/supervisor/stats",
        json={
            "result": "ok",
            "data": {
                "cpu_percent": 0.99,
                "memory_usage": 182611968,
                "memory_limit": 3977146368,
                "memory_percent": 4.59,
                "network_rx": 362570232,
                "network_tx": 82374138,
                "blk_read": 46010945536,
                "blk_write": 15051526144,
            },
        },
    )
    # No ingress panels; network reports healthy connectivity.
    aioclient_mock.get(
        "http://127.0.0.1/ingress/panels", json={"result": "ok", "data": {"panels": {}}}
    )
    aioclient_mock.get(
        "http://127.0.0.1/network/info",
        json={
            "result": "ok",
            "data": {
                "host_internet": True,
                "supervisor_internet": True,
            },
        },
    )
@pytest.mark.parametrize(
("store_addons", "store_repositories"), [(MOCK_STORE_ADDONS, MOCK_REPOSITORIES)]
)
@pytest.mark.parametrize(
("entity_id", "expected", "addon_state"),
[
("switch.test", "on", "started"),
("switch.test_two", "off", "stopped"),
],
)
async def test_switch_state(
hass: HomeAssistant,
entity_id: str,
expected: str,
addon_state: str,
entity_registry: er.EntityRegistry,
addon_installed: AsyncMock,
setup_integration: MockConfigEntry,
) -> None:
"""Test hassio addon switch state."""
addon_installed.return_value.state = addon_state
# Verify that the entity is disabled by default.
assert hass.states.get(entity_id) is None
# Enable the entity.
await enable_entity(hass, entity_registry, setup_integration, entity_id)
# Verify that the entity have the expected state.
state = hass.states.get(entity_id)
assert state is not None
assert state.state == expected
@pytest.mark.parametrize(
("store_addons", "store_repositories"), [(MOCK_STORE_ADDONS, MOCK_REPOSITORIES)]
)
async def test_switch_turn_on(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
entity_registry: er.EntityRegistry,
addon_installed: AsyncMock,
setup_integration: MockConfigEntry,
) -> None:
"""Test turning on addon switch."""
entity_id = "switch.test_two"
addon_installed.return_value.state = "stopped"
# Mock the start addon API call
aioclient_mock.post("http://127.0.0.1/addons/test-two/start", json={"result": "ok"})
# Verify that the entity is disabled by default.
assert hass.states.get(entity_id) is None
# Enable the entity.
await enable_entity(hass, entity_registry, setup_integration, entity_id)
# Verify initial state is off
state = hass.states.get(entity_id)
assert state is not None
assert state.state == "off"
# Turn on the switch
await hass.services.async_call(
"switch",
"turn_on",
{"entity_id": entity_id},
blocking=True,
)
# Verify the API was called
assert aioclient_mock.mock_calls[-1][1].path == "/addons/test-two/start"
assert aioclient_mock.mock_calls[-1][0] == "POST"
@pytest.mark.parametrize(
("store_addons", "store_repositories"), [(MOCK_STORE_ADDONS, MOCK_REPOSITORIES)]
)
async def test_switch_turn_off(
hass: HomeAssistant,
aioclient_mock: AiohttpClientMocker,
entity_registry: er.EntityRegistry,
addon_installed: AsyncMock,
setup_integration: MockConfigEntry,
) -> None:
"""Test turning off addon switch."""
entity_id = "switch.test"
addon_installed.return_value.state = "started"
# Mock the stop addon API call
aioclient_mock.post("http://127.0.0.1/addons/test/stop", json={"result": "ok"})
# Verify that the entity is disabled by default.
assert hass.states.get(entity_id) is None
# Enable the entity.
await enable_entity(hass, entity_registry, setup_integration, entity_id)
# Verify initial state is on
state = hass.states.get(entity_id)
assert state is not None
assert state.state == "on"
# Turn off the switch
await hass.services.async_call(
"switch",
"turn_off",
{"entity_id": entity_id},
blocking=True,
)
# Verify the API was called
assert aioclient_mock.mock_calls[-1][1].path == "/addons/test/stop"
assert aioclient_mock.mock_calls[-1][0] == "POST"
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/hassio/test_switch.py",
"license": "Apache License 2.0",
"lines": 271,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/history_stats/test_diagnostics.py | """Tests for derivative diagnostics."""
import pytest
from homeassistant.components.history_stats.const import DEFAULT_NAME, DOMAIN
from homeassistant.const import CONF_ENTITY_ID, CONF_NAME
from homeassistant.core import HomeAssistant
from tests.components.diagnostics import get_diagnostics_for_config_entry
from tests.typing import ClientSessionGenerator
@pytest.mark.usefixtures("recorder_mock")
async def test_diagnostics(
hass: HomeAssistant, hass_client: ClientSessionGenerator, loaded_entry
) -> None:
"""Test diagnostics for config entry."""
result = await get_diagnostics_for_config_entry(hass, hass_client, loaded_entry)
assert isinstance(result, dict)
assert result["config_entry"]["domain"] == DOMAIN
assert result["config_entry"]["options"][CONF_NAME] == DEFAULT_NAME
assert (
result["config_entry"]["options"][CONF_ENTITY_ID]
== "binary_sensor.test_monitored"
)
assert result["entity"][0]["entity_id"] == "sensor.unnamed_statistics"
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/history_stats/test_diagnostics.py",
"license": "Apache License 2.0",
"lines": 21,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
home-assistant/core:tests/components/homeassistant_connect_zbt2/common.py | """Common constants for the Connect ZBT-2 integration tests."""
from homeassistant.helpers.service_info.usb import UsbServiceInfo
USB_DATA_ZBT2 = UsbServiceInfo(
device="/dev/serial/by-id/usb-Nabu_Casa_ZBT-2_80B54EEFAE18-if01-port0",
vid="303A",
pid="4001",
serial_number="80B54EEFAE18",
manufacturer="Nabu Casa",
description="ZBT-2",
)
| {
"repo_id": "home-assistant/core",
"file_path": "tests/components/homeassistant_connect_zbt2/common.py",
"license": "Apache License 2.0",
"lines": 10,
"canary_id": -1,
"canary_value": "",
"pii_type": "",
"provider": "",
"regex_pattern": "",
"repetition": -1,
"template": ""
} | test |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.