Update energy_cost_coordinator.py

This commit is contained in:
balgerion
2025-07-01 10:54:22 +02:00
committed by GitHub
parent c672882ddb
commit 67df551dcf

View File

@@ -12,20 +12,41 @@ from .const import (
API_URL, API_URL,
ENERGY_COST_ENDPOINT, ENERGY_COST_ENDPOINT,
ENERGY_USAGE_ENDPOINT, ENERGY_USAGE_ENDPOINT,
API_TIMEOUT API_TIMEOUT,
CONF_RETRY_ATTEMPTS,
CONF_RETRY_DELAY,
DEFAULT_RETRY_ATTEMPTS,
DEFAULT_RETRY_DELAY
) )
from .update_coordinator import ExponentialBackoffRetry
_LOGGER = logging.getLogger(__name__) _LOGGER = logging.getLogger(__name__)
class PstrykCostDataUpdateCoordinator(DataUpdateCoordinator): class PstrykCostDataUpdateCoordinator(DataUpdateCoordinator):
"""Class to manage fetching Pstryk energy cost data.""" """Class to manage fetching Pstryk energy cost data."""
def __init__(self, hass: HomeAssistant, api_key: str): def __init__(self, hass: HomeAssistant, api_key: str, retry_attempts=None, retry_delay=None):
"""Initialize.""" """Initialize."""
self.api_key = api_key self.api_key = api_key
self._unsub_hourly = None self._unsub_hourly = None
self._unsub_midnight = None self._unsub_midnight = None
# Get retry configuration from entry options
if retry_attempts is None or retry_delay is None:
# Try to find the config entry to get retry options
for entry in hass.config_entries.async_entries(DOMAIN):
if entry.data.get("api_key") == api_key:
retry_attempts = entry.options.get(CONF_RETRY_ATTEMPTS, DEFAULT_RETRY_ATTEMPTS)
retry_delay = entry.options.get(CONF_RETRY_DELAY, DEFAULT_RETRY_DELAY)
break
else:
# Use defaults if no matching entry found
retry_attempts = DEFAULT_RETRY_ATTEMPTS
retry_delay = DEFAULT_RETRY_DELAY
# Initialize retry mechanism with configurable values
self.retry_mechanism = ExponentialBackoffRetry(max_retries=retry_attempts, base_delay=retry_delay)
super().__init__( super().__init__(
hass, hass,
_LOGGER, _LOGGER,
@@ -38,6 +59,7 @@ class PstrykCostDataUpdateCoordinator(DataUpdateCoordinator):
# Schedule midnight updates # Schedule midnight updates
self.schedule_midnight_update() self.schedule_midnight_update()
async def _async_update_data(self): async def _async_update_data(self):
"""Fetch energy cost data from API.""" """Fetch energy cost data from API."""
_LOGGER.debug("Starting energy cost and usage data fetch") _LOGGER.debug("Starting energy cost and usage data fetch")
@@ -182,6 +204,8 @@ class PstrykCostDataUpdateCoordinator(DataUpdateCoordinator):
"rae_usage": 0 "rae_usage": 0
} }
live_date = None
# Find the live usage frame (current day) # Find the live usage frame (current day)
if usage_data and usage_data.get("frames"): if usage_data and usage_data.get("frames"):
_LOGGER.info(f"Processing {len(usage_data['frames'])} usage frames") _LOGGER.info(f"Processing {len(usage_data['frames'])} usage frames")
@@ -207,26 +231,31 @@ class PstrykCostDataUpdateCoordinator(DataUpdateCoordinator):
break break
# Find the corresponding cost frame for the same day # Find the corresponding cost frame for the same day
if cost_data and cost_data.get("frames"): if cost_data and cost_data.get("frames") and live_date:
_LOGGER.info(f"Processing {len(cost_data['frames'])} cost frames") _LOGGER.info(f"Processing {len(cost_data['frames'])} cost frames, looking for date: {live_date}")
# Look for the most recent cost frame with data # Look for the cost frame that matches the live usage frame's date
for frame in reversed(cost_data["frames"]): for frame in cost_data["frames"]:
frame_start = frame.get("start", "") frame_start = frame.get("start", "")
_LOGGER.info(f"Checking cost frame: start={frame_start}, " frame_date = frame_start.split("T")[0] if frame_start else ""
_LOGGER.info(f"Checking cost frame: start={frame_start}, date={frame_date}, "
f"balance={frame.get('energy_balance_value', 0)}, " f"balance={frame.get('energy_balance_value', 0)}, "
f"cost={frame.get('fae_cost', 0)}") f"cost={frame.get('fae_cost', 0)}")
if (frame.get("energy_balance_value", 0) != 0 or # Match the date with the live frame's date
frame.get("fae_cost", 0) != 0 or if frame_date == live_date:
frame.get("energy_sold_value", 0) != 0):
result["frame"] = frame result["frame"] = frame
result["total_balance"] = frame.get("energy_balance_value", 0) result["total_balance"] = frame.get("energy_balance_value", 0)
result["total_sold"] = frame.get("energy_sold_value", 0) result["total_sold"] = frame.get("energy_sold_value", 0)
result["total_cost"] = abs(frame.get("fae_cost", 0)) result["total_cost"] = abs(frame.get("fae_cost", 0))
_LOGGER.info(f"Found cost frame with data: balance={result['total_balance']}, " _LOGGER.info(f"*** MATCHED cost frame for date {live_date}: balance={result['total_balance']}, "
f"cost={result['total_cost']}, sold={result['total_sold']}") f"cost={result['total_cost']}, sold={result['total_sold']} ***")
break break
else:
_LOGGER.warning(f"No cost frame found matching live date {live_date}")
elif not live_date:
_LOGGER.warning("No live frame found in usage data, cannot match cost frame")
_LOGGER.info(f"=== FINAL RESULT: fae_usage={result['fae_usage']}, " _LOGGER.info(f"=== FINAL RESULT: fae_usage={result['fae_usage']}, "
f"rae_usage={result['rae_usage']}, " f"rae_usage={result['rae_usage']}, "
@@ -237,6 +266,7 @@ class PstrykCostDataUpdateCoordinator(DataUpdateCoordinator):
def _process_yearly_data_simple(self, cost_data, usage_data): def _process_yearly_data_simple(self, cost_data, usage_data):
"""Simple yearly data processor - sum all months for the year.""" """Simple yearly data processor - sum all months for the year."""
_LOGGER.info("Processing yearly data - simple version") _LOGGER.info("Processing yearly data - simple version")
@@ -301,8 +331,9 @@ class PstrykCostDataUpdateCoordinator(DataUpdateCoordinator):
return result return result
async def _fetch_data(self, url): async def _fetch_data(self, url):
"""Fetch data from the API.""" """Fetch data from the API using retry mechanism."""
try: async def _make_api_request():
"""Make the actual API request."""
_LOGGER.info(f"Fetching data from URL: {url}") _LOGGER.info(f"Fetching data from URL: {url}")
async with aiohttp.ClientSession() as session: async with aiohttp.ClientSession() as session:
async with async_timeout.timeout(API_TIMEOUT): async with async_timeout.timeout(API_TIMEOUT):
@@ -317,15 +348,23 @@ class PstrykCostDataUpdateCoordinator(DataUpdateCoordinator):
if resp.status != 200: if resp.status != 200:
error_text = await resp.text() error_text = await resp.text()
_LOGGER.error("API error %s for URL %s: %s", resp.status, url, error_text) _LOGGER.error("API error %s for URL %s: %s", resp.status, url, error_text)
return None raise UpdateFailed(f"API error {resp.status}: {error_text}")
data = await resp.json() data = await resp.json()
_LOGGER.info(f"API response data: {data}") _LOGGER.info(f"API response data: {data}")
return data return data
try:
# Load translations for retry mechanism
await self.retry_mechanism.load_translations(self.hass)
# Use retry mechanism to fetch data
return await self.retry_mechanism.execute(_make_api_request)
except Exception as e: except Exception as e:
_LOGGER.error("Error fetching from %s: %s", url, e) _LOGGER.error("Error fetching from %s after retries: %s", url, e)
return None return None
def schedule_midnight_update(self): def schedule_midnight_update(self):
"""Schedule midnight updates for daily reset.""" """Schedule midnight updates for daily reset."""
if hasattr(self, '_unsub_midnight'): if hasattr(self, '_unsub_midnight'):