Update energy_cost_coordinator.py

balgerion committed on 2025-07-01 10:54:22 +02:00 (committed by GitHub)
commit 67df551dcf · parent c672882ddb


@@ -12,31 +12,53 @@ from .const import (
     API_URL,
     ENERGY_COST_ENDPOINT,
     ENERGY_USAGE_ENDPOINT,
-    API_TIMEOUT
+    API_TIMEOUT,
+    CONF_RETRY_ATTEMPTS,
+    CONF_RETRY_DELAY,
+    DEFAULT_RETRY_ATTEMPTS,
+    DEFAULT_RETRY_DELAY
 )
+from .update_coordinator import ExponentialBackoffRetry

 _LOGGER = logging.getLogger(__name__)


 class PstrykCostDataUpdateCoordinator(DataUpdateCoordinator):
     """Class to manage fetching Pstryk energy cost data."""

-    def __init__(self, hass: HomeAssistant, api_key: str):
+    def __init__(self, hass: HomeAssistant, api_key: str, retry_attempts=None, retry_delay=None):
         """Initialize."""
         self.api_key = api_key
         self._unsub_hourly = None
         self._unsub_midnight = None
+
+        # Get retry configuration from entry options
+        if retry_attempts is None or retry_delay is None:
+            # Try to find the config entry to get retry options
+            for entry in hass.config_entries.async_entries(DOMAIN):
+                if entry.data.get("api_key") == api_key:
+                    retry_attempts = entry.options.get(CONF_RETRY_ATTEMPTS, DEFAULT_RETRY_ATTEMPTS)
+                    retry_delay = entry.options.get(CONF_RETRY_DELAY, DEFAULT_RETRY_DELAY)
+                    break
+            else:
+                # Use defaults if no matching entry found
+                retry_attempts = DEFAULT_RETRY_ATTEMPTS
+                retry_delay = DEFAULT_RETRY_DELAY
+
+        # Initialize retry mechanism with configurable values
+        self.retry_mechanism = ExponentialBackoffRetry(max_retries=retry_attempts, base_delay=retry_delay)
+
         super().__init__(
             hass,
             _LOGGER,
             name=f"{DOMAIN}_cost",
             update_interval=timedelta(hours=1),
         )
         # Schedule hourly updates
         self.schedule_hourly_update()
         # Schedule midnight updates
         self.schedule_midnight_update()

     async def _async_update_data(self):
         """Fetch energy cost data from API."""
@@ -170,70 +192,78 @@ class PstrykCostDataUpdateCoordinator(DataUpdateCoordinator):
     def _process_daily_data_simple(self, cost_data, usage_data):
         """Simple daily data processor - directly use API values without complex logic."""
         _LOGGER.info("=== SIMPLE DAILY DATA PROCESSOR ===")

         result = {
             "frame": {},
             "total_balance": 0,
             "total_sold": 0,
             "total_cost": 0,
             "fae_usage": 0,
             "rae_usage": 0
         }

+        live_date = None
+
         # Find the live usage frame (current day)
         if usage_data and usage_data.get("frames"):
             _LOGGER.info(f"Processing {len(usage_data['frames'])} usage frames")

             for i, frame in enumerate(usage_data["frames"]):
                 _LOGGER.info(f"Frame {i}: start={frame.get('start')}, "
                              f"is_live={frame.get('is_live', False)}, "
                              f"fae_usage={frame.get('fae_usage')}, "
                              f"rae={frame.get('rae')}")

                 # Use the frame marked as is_live
                 if frame.get("is_live", False):
                     result["fae_usage"] = frame.get("fae_usage", 0)
                     result["rae_usage"] = frame.get("rae", 0)
                     _LOGGER.info(f"*** FOUND LIVE FRAME: fae_usage={result['fae_usage']}, rae={result['rae_usage']} ***")

                     # Store the live frame's date info for cost matching
                     live_start = frame.get("start")
                     if live_start:
                         # Extract the date part for matching with cost data
                         live_date = live_start.split("T")[0]
                         _LOGGER.info(f"Live frame date: {live_date}")
                     break

         # Find the corresponding cost frame for the same day
-        if cost_data and cost_data.get("frames"):
-            _LOGGER.info(f"Processing {len(cost_data['frames'])} cost frames")
-
-            # Look for the most recent cost frame with data
-            for frame in reversed(cost_data["frames"]):
-                frame_start = frame.get("start", "")
-                _LOGGER.info(f"Checking cost frame: start={frame_start}, "
-                             f"balance={frame.get('energy_balance_value', 0)}, "
-                             f"cost={frame.get('fae_cost', 0)}")
-
-                if (frame.get("energy_balance_value", 0) != 0 or
-                        frame.get("fae_cost", 0) != 0 or
-                        frame.get("energy_sold_value", 0) != 0):
-                    result["frame"] = frame
-                    result["total_balance"] = frame.get("energy_balance_value", 0)
-                    result["total_sold"] = frame.get("energy_sold_value", 0)
-                    result["total_cost"] = abs(frame.get("fae_cost", 0))
-                    _LOGGER.info(f"Found cost frame with data: balance={result['total_balance']}, "
-                                 f"cost={result['total_cost']}, sold={result['total_sold']}")
-                    break
+        if cost_data and cost_data.get("frames") and live_date:
+            _LOGGER.info(f"Processing {len(cost_data['frames'])} cost frames, looking for date: {live_date}")
+
+            # Look for the cost frame that matches the live usage frame's date
+            for frame in cost_data["frames"]:
+                frame_start = frame.get("start", "")
+                frame_date = frame_start.split("T")[0] if frame_start else ""
+
+                _LOGGER.info(f"Checking cost frame: start={frame_start}, date={frame_date}, "
+                             f"balance={frame.get('energy_balance_value', 0)}, "
+                             f"cost={frame.get('fae_cost', 0)}")
+
+                # Match the date with the live frame's date
+                if frame_date == live_date:
+                    result["frame"] = frame
+                    result["total_balance"] = frame.get("energy_balance_value", 0)
+                    result["total_sold"] = frame.get("energy_sold_value", 0)
+                    result["total_cost"] = abs(frame.get("fae_cost", 0))
+                    _LOGGER.info(f"*** MATCHED cost frame for date {live_date}: balance={result['total_balance']}, "
+                                 f"cost={result['total_cost']}, sold={result['total_sold']} ***")
+                    break
+            else:
+                _LOGGER.warning(f"No cost frame found matching live date {live_date}")
+        elif not live_date:
+            _LOGGER.warning("No live frame found in usage data, cannot match cost frame")

         _LOGGER.info(f"=== FINAL RESULT: fae_usage={result['fae_usage']}, "
                      f"rae_usage={result['rae_usage']}, "
                      f"balance={result['total_balance']}, "
                      f"cost={result['total_cost']}, "
                      f"sold={result['total_sold']} ===")
         return result
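
The behavioral change in this hunk: rather than taking the most recent cost frame with nonzero values, the processor now pairs cost and usage by calendar date, keyed on the date portion (everything before "T") of the live frame's start timestamp. A standalone illustration of that matching — all frame values here are made up:

usage_frames = [
    {"start": "2025-06-30T00:00:00+02:00", "is_live": False, "fae_usage": 12.4, "rae": 3.1},
    {"start": "2025-07-01T00:00:00+02:00", "is_live": True, "fae_usage": 5.2, "rae": 0.8},
]
cost_frames = [
    {"start": "2025-06-30T00:00:00+02:00", "energy_balance_value": -16.8, "energy_sold_value": 2.1, "fae_cost": -18.9},
    {"start": "2025-07-01T00:00:00+02:00", "energy_balance_value": -6.8, "energy_sold_value": 0.55, "fae_cost": -7.35},
]

# The frame flagged is_live supplies the date that keys the cost lookup.
live = next(f for f in usage_frames if f.get("is_live"))
live_date = live["start"].split("T")[0]  # "2025-07-01"

match = next((f for f in cost_frames
              if f.get("start", "").split("T")[0] == live_date), None)
print(live_date, match and abs(match["fae_cost"]))  # 2025-07-01 7.35

The old reversed scan for any nonzero frame could plausibly report yesterday's totals against today's live usage; date matching rules that out.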
@@ -301,30 +331,39 @@ class PstrykCostDataUpdateCoordinator(DataUpdateCoordinator):
         return result

     async def _fetch_data(self, url):
-        """Fetch data from the API."""
-        try:
-            _LOGGER.info(f"Fetching data from URL: {url}")
-            async with aiohttp.ClientSession() as session:
-                async with async_timeout.timeout(API_TIMEOUT):
-                    resp = await session.get(
-                        url,
-                        headers={
-                            "Authorization": self.api_key,
-                            "Accept": "application/json"
-                        }
-                    )
-
-                    if resp.status != 200:
-                        error_text = await resp.text()
-                        _LOGGER.error("API error %s for URL %s: %s", resp.status, url, error_text)
-                        return None
-
-                    data = await resp.json()
-                    _LOGGER.info(f"API response data: {data}")
-                    return data
-        except Exception as e:
-            _LOGGER.error("Error fetching from %s: %s", url, e)
-            return None
+        """Fetch data from the API using retry mechanism."""
+        async def _make_api_request():
+            """Make the actual API request."""
+            _LOGGER.info(f"Fetching data from URL: {url}")
+            async with aiohttp.ClientSession() as session:
+                async with async_timeout.timeout(API_TIMEOUT):
+                    resp = await session.get(
+                        url,
+                        headers={
+                            "Authorization": self.api_key,
+                            "Accept": "application/json"
+                        }
+                    )
+
+                    if resp.status != 200:
+                        error_text = await resp.text()
+                        _LOGGER.error("API error %s for URL %s: %s", resp.status, url, error_text)
+                        raise UpdateFailed(f"API error {resp.status}: {error_text}")
+
+                    data = await resp.json()
+                    _LOGGER.info(f"API response data: {data}")
+                    return data
+
+        try:
+            # Load translations for retry mechanism
+            await self.retry_mechanism.load_translations(self.hass)
+            # Use retry mechanism to fetch data
+            return await self.retry_mechanism.execute(_make_api_request)
+        except Exception as e:
+            _LOGGER.error("Error fetching from %s after retries: %s", url, e)
+            return None

     def schedule_midnight_update(self):
         """Schedule midnight updates for daily reset."""