Fix concurrent connection usage in multi-timeframe generator

This commit is contained in:
BTC Bot
2026-02-11 23:52:30 +01:00
parent ec37fae0fd
commit f9559d1116

View File

@ -131,7 +131,7 @@ class CustomTimeframeGenerator:
return days * 1440 return days * 1440
return 1 return 1
async def aggregate_and_upsert(self, symbol: str, interval: str, bucket_start: datetime) -> None: async def aggregate_and_upsert(self, symbol: str, interval: str, bucket_start: datetime, conn=None) -> None:
"""Aggregate 1m data for a specific bucket and upsert""" """Aggregate 1m data for a specific bucket and upsert"""
bucket_end = bucket_start # Initialize bucket_end = bucket_start # Initialize
@ -159,55 +159,70 @@ class CustomTimeframeGenerator:
else: else:
bucket_end = bucket_start + timedelta(minutes=1) bucket_end = bucket_start + timedelta(minutes=1)
async with self.db.acquire() as conn: # Use provided connection or acquire a new one
rows = await conn.fetch(f""" if conn is None:
SELECT time, open, high, low, close, volume async with self.db.acquire() as connection:
FROM candles await self._process_aggregation(connection, symbol, interval, source_interval, bucket_start, bucket_end, expected_count)
WHERE symbol = $1 AND interval = $2 else:
AND time >= $3 AND time < $4 await self._process_aggregation(conn, symbol, interval, source_interval, bucket_start, bucket_end, expected_count)
ORDER BY time ASC
""", symbol, source_interval, bucket_start, bucket_end)
async def _process_aggregation(self, conn, symbol, interval, source_interval, bucket_start, bucket_end, expected_count):
    """Aggregate source-interval candles into one target-interval candle.

    Fetches every ``source_interval`` candle for ``symbol`` in the
    half-open window ``[bucket_start, bucket_end)`` over the supplied
    database connection, builds the OHLCV aggregate, and upserts it via
    the same connection so a caller can batch many aggregations on a
    single pooled connection.

    Args:
        conn: An already-acquired asyncpg-style connection (must support
            ``fetch``). Ownership stays with the caller; this method
            never releases it.
        symbol: Trading pair symbol the candles belong to.
        interval: Target interval label written onto the new candle.
        source_interval: Interval of the rows being aggregated.
        bucket_start: Inclusive window start; also the new candle's time.
        bucket_end: Exclusive window end.
        expected_count: Number of source rows a complete bucket holds;
            fewer rows marks the aggregate as incomplete.
    """
    # NOTE: plain string, not an f-string — the $n placeholders are
    # asyncpg query parameters. The original f-string prefix had no
    # replacement fields and risked accidental interpolation on edit.
    rows = await conn.fetch("""
        SELECT time, open, high, low, close, volume
        FROM candles
        WHERE symbol = $1 AND interval = $2
          AND time >= $3 AND time < $4
        ORDER BY time ASC
    """, symbol, source_interval, bucket_start, bucket_end)

    if not rows:
        return  # nothing to aggregate for this bucket

    # A bucket is complete only when every expected source candle is present.
    is_complete = len(rows) >= expected_count

    candle = CustomCandle(
        time=bucket_start,
        symbol=symbol,
        interval=interval,
        open=float(rows[0]['open']),
        high=max(float(r['high']) for r in rows),
        low=min(float(r['low']) for r in rows),
        close=float(rows[-1]['close']),
        volume=sum(float(r['volume']) for r in rows),
        is_complete=is_complete
    )

    # Reuse the caller's connection so batched updates don't exhaust the pool.
    await self._upsert_candle(candle, conn)
async def _upsert_candle(self, c: CustomCandle, conn=None) -> None:
    """Insert or update a single candle row.

    Performs an ``INSERT ... ON CONFLICT`` upsert keyed on
    (time, symbol, interval). When ``conn`` is provided it is used
    directly and the caller keeps ownership; otherwise a connection is
    acquired from the pool for just this one statement.
    """
    query = """
        INSERT INTO candles (time, symbol, interval, open, high, low, close, volume, validated)
        VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
        ON CONFLICT (time, symbol, interval) DO UPDATE SET
            open = EXCLUDED.open,
            high = EXCLUDED.high,
            low = EXCLUDED.low,
            close = EXCLUDED.close,
            volume = EXCLUDED.volume,
            validated = EXCLUDED.validated,
            created_at = NOW()
    """
    values = (c.time, c.symbol, c.interval, c.open, c.high, c.low, c.close, c.volume, c.is_complete)

    if conn is not None:
        # Shared-connection path: caller is batching statements.
        await conn.execute(query, *values)
    else:
        # Standalone path: borrow a pooled connection for this write only.
        async with self.db.acquire() as pooled:
            await pooled.execute(query, *values)
async def update_realtime(self, new_1m_candles: List[Candle]) -> None:
    """
    Update ALL timeframes (standard and custom) based on new 1m candles.
    Called after 1m buffer flush.

    A single pooled connection is shared by every aggregation, and the
    intervals are processed sequentially on it — this prevents pool
    exhaustion when many timeframes update at once.

    Args:
        new_1m_candles: Freshly flushed 1m candles. Only the first
            candle's symbol is used, so all candles are assumed to share
            one symbol — TODO confirm with callers.
    """
    if not new_1m_candles:
        return

    # Lazily discover the earliest 1m timestamp; bail out if the store
    # is still empty after initialization.
    if not self.first_1m_time:
        await self.initialize()
        if not self.first_1m_time:
            return

    symbol = new_1m_candles[0].symbol

    async with self.db.acquire() as conn:
        # 1. Update all standard intervals + 37m.
        #    Sequential iteration is required because every call shares 'conn'.
        intervals_to_update = list(self.STANDARD_INTERVALS.keys()) + ['37m']
        for interval in intervals_to_update:
            try:
                bucket_start = self.get_bucket_start(new_1m_candles[-1].time, interval)
                await self.aggregate_and_upsert(symbol, interval, bucket_start, conn=conn)
            except Exception:
                # Best-effort per interval: one failure must not block the
                # remaining timeframes. logger.exception keeps the full
                # traceback and uses lazy %-formatting.
                logger.exception("Error updating interval %s", interval)

        # 2. Update 148m last (it depends on 37m being updated first).
        try:
            bucket_148m = self.get_bucket_start(new_1m_candles[-1].time, '148m')
            await self.aggregate_and_upsert(symbol, '148m', bucket_148m, conn=conn)
        except Exception:
            logger.exception("Error updating interval 148m")
async def generate_historical(self, interval: str, batch_size: int = 5000) -> int: async def generate_historical(self, interval: str, batch_size: int = 5000) -> int:
""" """