Update sensor.py

Author: balgerion
Date: 2025-09-30 16:12:28 +02:00
Committed by: GitHub
Parent: 85c0e6ce24
Commit: bcf36a439a

@@ -1,17 +1,16 @@
 """Sensor platform for Pstryk Energy integration."""
 import logging
 import asyncio
-from datetime import datetime, timedelta
-from decimal import Decimal, ROUND_HALF_UP
+from datetime import timedelta
 from homeassistant.config_entries import ConfigEntry
 from homeassistant.core import HomeAssistant, callback
 from homeassistant.components.sensor import SensorEntity, SensorStateClass, SensorDeviceClass
 from homeassistant.helpers.update_coordinator import CoordinatorEntity
 from homeassistant.helpers.restore_state import RestoreEntity
-from homeassistant.helpers import entity_registry as er
 from homeassistant.util import dt as dt_util
 from .update_coordinator import PstrykDataUpdateCoordinator
 from .energy_cost_coordinator import PstrykCostDataUpdateCoordinator
+from .api_client import PstrykAPIClient
 from .const import (
     DOMAIN,
     CONF_MQTT_48H_MODE,
@@ -27,6 +26,29 @@ _LOGGER = logging.getLogger(__name__)
 # Store translations globally to avoid reloading for each sensor
 _TRANSLATIONS_CACHE = {}

+# Cache for manifest version
+_VERSION_CACHE = None
+
+
+def get_integration_version(hass: HomeAssistant) -> str:
+    """Get integration version from manifest.json."""
+    global _VERSION_CACHE
+    if _VERSION_CACHE is not None:
+        return _VERSION_CACHE
+    try:
+        import json
+        import os
+        manifest_path = os.path.join(os.path.dirname(__file__), "manifest.json")
+        with open(manifest_path, "r") as f:
+            manifest = json.load(f)
+        _VERSION_CACHE = manifest.get("version", "unknown")
+        return _VERSION_CACHE
+    except Exception as ex:
+        _LOGGER.warning("Failed to read version from manifest.json: %s", ex)
+        return "unknown"
+
+
 async def async_setup_entry(
     hass: HomeAssistant,
     entry: ConfigEntry,
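Because `get_integration_version` reads `manifest.json` with a blocking `open()`, callers running on the event loop may prefer to hand the very first read to an executor; after that the module-level cache answers without any I/O. A minimal sketch of such a wrapper, assuming the helper above is in scope; `async_get_integration_version` is a hypothetical name, not something this commit adds:

```python
# Hypothetical wrapper (not part of this commit): perform the one blocking
# manifest.json read in Home Assistant's executor so the event loop stays free.
async def async_get_integration_version(hass: HomeAssistant) -> str:
    if _VERSION_CACHE is not None:          # cache hit: no file I/O at all
        return _VERSION_CACHE
    # get_integration_version fills _VERSION_CACHE on success
    return await hass.async_add_executor_job(get_integration_version, hass)
```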
@@ -83,6 +105,14 @@ async def async_setup_entry(
         cost_coordinator._unsub_midnight()
         hass.data[DOMAIN].pop(cost_key, None)

+    # Create shared API client (or reuse existing one)
+    api_client_key = f"{entry.entry_id}_api_client"
+    if api_client_key not in hass.data[DOMAIN]:
+        api_client = PstrykAPIClient(hass, api_key)
+        hass.data[DOMAIN][api_client_key] = api_client
+    else:
+        api_client = hass.data[DOMAIN][api_client_key]
+
     entities = []
     coordinators = []
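The create-or-reuse branch above keeps a single `PstrykAPIClient` per config entry in `hass.data[DOMAIN]`, so both price coordinators and the cost coordinator share one client. The same logic can be packaged as a small helper; this is a sketch only, and `get_shared_api_client` is a hypothetical name rather than something the commit introduces:

```python
# Hypothetical helper (not in this commit): return the per-entry shared client,
# creating it on first use and storing it under hass.data[DOMAIN].
def get_shared_api_client(hass: HomeAssistant, entry_id: str, api_key: str) -> PstrykAPIClient:
    key = f"{entry_id}_api_client"
    client = hass.data[DOMAIN].get(key)
    if client is None:
        client = PstrykAPIClient(hass, api_key)
        hass.data[DOMAIN][key] = client
    return client
```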
@@ -91,7 +121,7 @@ async def async_setup_entry(
         key = f"{entry.entry_id}_{price_type}"
         coordinator = PstrykDataUpdateCoordinator(
             hass,
-            api_key,
+            api_client,
             price_type,
             mqtt_48h_mode,
             retry_attempts,
@@ -99,40 +129,49 @@ async def async_setup_entry(
         )
         coordinators.append((coordinator, price_type, key))

-    # Create cost coordinator
-    cost_coordinator = PstrykCostDataUpdateCoordinator(hass, api_key, retry_attempts, retry_delay)
+    # Create cost coordinator (will be initialized as unavailable for lazy loading)
+    cost_coordinator = PstrykCostDataUpdateCoordinator(hass, api_client)
+    cost_coordinator.last_update_success = False
     coordinators.append((cost_coordinator, "cost", cost_key))

-    # Initialize coordinators in parallel to save time
-    initial_refresh_tasks = []
-    for coordinator, coordinator_type, _ in coordinators:
-        # Check if we're in the setup process or reloading
-        try:
-            # Newer Home Assistant versions
-            from homeassistant.config_entries import ConfigEntryState
-            is_setup = entry.state == ConfigEntryState.SETUP_IN_PROGRESS
-        except ImportError:
-            # Older Home Assistant versions - try another approach
-            is_setup = not hass.data[DOMAIN].get(f"{entry.entry_id}_initialized", False)
-
-        if is_setup:
-            initial_refresh_tasks.append(coordinator.async_config_entry_first_refresh())
-        else:
-            initial_refresh_tasks.append(coordinator.async_refresh())
+    # Initialize ONLY price coordinators immediately (fast startup)
+    # Cost coordinator will be loaded lazily in background
+    _LOGGER.info("Starting quick initialization - loading price coordinators only")
+
+    async def safe_initial_fetch(coord, coord_type):
+        """Safely fetch initial data for coordinator."""
+        try:
+            data = await coord._async_update_data()
+            coord.data = data
+            coord.last_update_success = True
+            _LOGGER.debug("Successfully initialized %s coordinator", coord_type)
+            return True
+        except Exception as err:
+            _LOGGER.error("Failed initial fetch for %s coordinator: %s", coord_type, err)
+            coord.last_update_success = False
+            return err
+
+    # Load only price coordinators immediately for fast startup
+    price_coordinators = [(c, t, k) for c, t, k in coordinators if t in ("buy", "sell")]
+    initial_refresh_tasks = [
+        safe_initial_fetch(coordinator, coordinator_type)
+        for coordinator, coordinator_type, _ in price_coordinators
+    ]

     refresh_results = await asyncio.gather(*initial_refresh_tasks, return_exceptions=True)

-    # Mark as initialized after first setup
-    hass.data[DOMAIN][f"{entry.entry_id}_initialized"] = True
-
-    # Process coordinators and set up sensors
-    for i, (coordinator, coordinator_type, key) in enumerate(coordinators):
-        # Check if initial refresh succeeded
+    # Check results for price coordinators
+    for i, (coordinator, coordinator_type, key) in enumerate(price_coordinators):
         if isinstance(refresh_results[i], Exception):
             _LOGGER.error("Failed to initialize %s coordinator: %s",
                           coordinator_type, str(refresh_results[i]))
+            # Still add coordinator and set up sensors even if initial load failed

+    # Store all coordinators and set up scheduling
+    buy_coord = None
+    sell_coord = None
+    for coordinator, coordinator_type, key in coordinators:
         # Store coordinator
         hass.data[DOMAIN][key] = coordinator
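`safe_initial_fetch` plus `asyncio.gather` fetches both price coordinators concurrently while converting each failure into a return value, so one coordinator's error cannot abort the other. A self-contained sketch of that pattern in plain asyncio (all names below are illustrative, not from the integration):

```python
import asyncio

async def fetch(name: str) -> str:
    """Stand-in for coordinator._async_update_data(); 'sell' is forced to fail."""
    if name == "sell":
        raise RuntimeError("API timeout")
    return f"{name} prices"

async def safe_fetch(name: str):
    """Mirror safe_initial_fetch: catch the error and hand it back as a value."""
    try:
        return await fetch(name)
    except Exception as err:
        return err

async def main():
    names = ("buy", "sell")
    results = await asyncio.gather(*(safe_fetch(n) for n in names))
    for name, result in zip(names, results):
        print(name, "failed:" if isinstance(result, Exception) else "ok:", result)

asyncio.run(main())
```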
@@ -145,43 +184,76 @@ async def async_setup_entry(
             if mqtt_48h_mode:
                 coordinator.schedule_afternoon_update()

+            # Create ONLY current price sensors (fast, immediate)
+            top = buy_top if coordinator_type == "buy" else sell_top
+            worst = buy_worst if coordinator_type == "buy" else sell_worst
+            entities.append(PstrykPriceSensor(coordinator, coordinator_type, top, worst, entry.entry_id))
+
+            # Store coordinator references for later use
+            if coordinator_type == "buy":
+                buy_coord = coordinator
+            elif coordinator_type == "sell":
+                sell_coord = coordinator
+
         # Schedule updates for cost coordinator
         elif coordinator_type == "cost":
             coordinator.schedule_hourly_update()
             coordinator.schedule_midnight_update()

-        # Create price sensors
-        if coordinator_type in ("buy", "sell"):
-            top = buy_top if coordinator_type == "buy" else sell_top
-            worst = buy_worst if coordinator_type == "buy" else sell_worst
-            entities.append(PstrykPriceSensor(coordinator, coordinator_type, top, worst, entry.entry_id))
-
-            # Create average price sensors (with both coordinators)
-            entities.append(PstrykAveragePriceSensor(
-                cost_coordinator,
-                coordinator,  # Pass the actual price coordinator, not string!
-                "monthly",
-                entry.entry_id
-            ))
-            entities.append(PstrykAveragePriceSensor(
-                cost_coordinator,
-                coordinator,  # Pass the actual price coordinator, not string!
-                "yearly",
-                entry.entry_id
-            ))
-
-    # Create financial balance sensors using cost coordinator
-    entities.append(PstrykFinancialBalanceSensor(
-        cost_coordinator, "daily", entry.entry_id
-    ))
-    entities.append(PstrykFinancialBalanceSensor(
-        cost_coordinator, "monthly", entry.entry_id
-    ))
-    entities.append(PstrykFinancialBalanceSensor(
-        cost_coordinator, "yearly", entry.entry_id
-    ))
-
-    async_add_entities(entities, True)
+    # Create remaining sensors (average price + financial balance) - they will show as unavailable initially
+    remaining_entities = []
+
+    # Create average price sensors for buy
+    if buy_coord:
+        for period in ("daily", "monthly", "yearly"):
+            remaining_entities.append(PstrykAveragePriceSensor(
+                cost_coordinator, buy_coord, period, entry.entry_id
+            ))
+
+    # Create average price sensors for sell
+    if sell_coord:
+        for period in ("daily", "monthly", "yearly"):
+            remaining_entities.append(PstrykAveragePriceSensor(
+                cost_coordinator, sell_coord, period, entry.entry_id
+            ))
+
+    # Create financial balance sensors
+    for period in ("daily", "monthly", "yearly"):
+        remaining_entities.append(PstrykFinancialBalanceSensor(
+            cost_coordinator, period, entry.entry_id
+        ))
+
+    # Register ALL sensors immediately:
+    # - Current price sensors (2) with data
+    # - Remaining sensors (15) as unavailable until cost coordinator loads
+    _LOGGER.info("Registering %d current price sensors with data and %d additional sensors as unavailable",
+                 len(entities), len(remaining_entities))
+    async_add_entities(entities + remaining_entities)
+
+    # Load cost coordinator data in background - sensors will automatically update when data arrives
+    async def lazy_load_cost_data():
+        """Load cost coordinator data in background - sensors update automatically via coordinator."""
+        _LOGGER.info("Waiting 15 seconds before loading cost coordinator data")
+        await asyncio.sleep(15)
+
+        _LOGGER.info("Loading cost coordinator data in background")
+        try:
+            # Load cost coordinator with all resolutions
+            data = await cost_coordinator._async_update_data(fetch_all=True)
+            cost_coordinator.data = data
+            cost_coordinator.last_update_success = True
+            # Notify all listening sensors that data is available
+            cost_coordinator.async_update_listeners()
+            _LOGGER.info("Cost coordinator loaded successfully - %d sensors updated",
+                         len(remaining_entities))
+        except Exception as err:
+            _LOGGER.warning("Failed to load cost coordinator: %s. %d sensors remain unavailable.",
+                            err, len(remaining_entities))
+            cost_coordinator.last_update_success = False
+            cost_coordinator.data = None
+
+    # Start background data loading
+    hass.async_create_task(lazy_load_cost_data())


 class PstrykPriceSensor(CoordinatorEntity, SensorEntity):
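The hunk above registers every sensor right away and only then back-fills the cost data from a background task, notifying listeners once it arrives. A minimal, Home-Assistant-free sketch of that ordering (a 1-second delay stands in for the 15-second one; all names are illustrative):

```python
import asyncio

class LazyCoordinator:
    """Stand-in for the cost coordinator: data arrives after entity registration."""
    def __init__(self):
        self.data = None
        self.last_update_success = False
        self._listeners = []

    def async_add_listener(self, callback):
        self._listeners.append(callback)

    def async_update_listeners(self):
        for callback in self._listeners:
            callback()

async def lazy_load(coord: LazyCoordinator):
    await asyncio.sleep(1)                        # stands in for the 15 s delay + API call
    coord.data = {"total_cost": 12.34}
    coord.last_update_success = True
    coord.async_update_listeners()                # sensors re-render once data exists

async def main():
    coord = LazyCoordinator()
    coord.async_add_listener(lambda: print("cost sensors updated:", coord.data))
    print("all entities registered; cost sensors start unavailable")
    task = asyncio.create_task(lazy_load(coord))  # background load, setup returns fast
    await task

asyncio.run(main())
```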
@@ -218,7 +290,7 @@ class PstrykPriceSensor(CoordinatorEntity, SensorEntity):
             "name": "Pstryk Energy",
             "manufacturer": "Pstryk",
             "model": "Energy Price Monitor",
-            "sw_version": "1.7.1",
+            "sw_version": get_integration_version(self.hass),
         }

     def _get_current_price(self):
@@ -752,7 +824,7 @@ class PstrykAveragePriceSensor(RestoreEntity, SensorEntity):
         self.cost_coordinator = cost_coordinator
         self.price_coordinator = price_coordinator
         self.price_type = price_coordinator.price_type
-        self.period = period  # 'monthly' or 'yearly'
+        self.period = period  # 'daily', 'monthly' or 'yearly'
         self.entry_id = entry_id
         self._attr_device_class = SensorDeviceClass.MONETARY
         self._state = None
@@ -810,7 +882,7 @@ class PstrykAveragePriceSensor(RestoreEntity, SensorEntity):
             "name": "Pstryk Energy",
             "manufacturer": "Pstryk",
             "model": "Energy Price Monitor",
-            "sw_version": "1.7.1",
+            "sw_version": get_integration_version(self.hass),
         }

     @property
@@ -956,7 +1028,7 @@ class PstrykFinancialBalanceSensor(CoordinatorEntity, SensorEntity):
             "name": "Pstryk Energy",
             "manufacturer": "Pstryk",
             "model": "Energy Price Monitor",
-            "sw_version": "1.7.1",
+            "sw_version": get_integration_version(self.hass),
         }

     @property