Fix concurrent connection usage in multi-timeframe generator
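Previously, every helper opened its own pool connection and `update_realtime` fanned the interval updates out with `asyncio.gather`, so a single flush could hold many pool connections at once and exhaust the pool. This change threads one optional `conn` through `aggregate_and_upsert` and `_upsert_candle` and runs all interval updates sequentially on that shared connection.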
@@ -131,7 +131,7 @@ class CustomTimeframeGenerator:
             return days * 1440
         return 1

-    async def aggregate_and_upsert(self, symbol: str, interval: str, bucket_start: datetime) -> None:
+    async def aggregate_and_upsert(self, symbol: str, interval: str, bucket_start: datetime, conn=None) -> None:
         """Aggregate 1m data for a specific bucket and upsert"""
         bucket_end = bucket_start  # Initialize

@@ -159,7 +159,15 @@ class CustomTimeframeGenerator:
         else:
             bucket_end = bucket_start + timedelta(minutes=1)

-        async with self.db.acquire() as conn:
+        # Use provided connection or acquire a new one
+        if conn is None:
+            async with self.db.acquire() as connection:
+                await self._process_aggregation(connection, symbol, interval, source_interval, bucket_start, bucket_end, expected_count)
+        else:
+            await self._process_aggregation(conn, symbol, interval, source_interval, bucket_start, bucket_end, expected_count)
+
+    async def _process_aggregation(self, conn, symbol, interval, source_interval, bucket_start, bucket_end, expected_count):
+        """Internal method to perform aggregation using a specific connection"""
         rows = await conn.fetch(f"""
             SELECT time, open, high, low, close, volume
             FROM candles
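The core of the fix is visible in this hunk: the helper accepts an optional connection and only borrows one from the pool when the caller did not supply its own. A minimal standalone sketch of the pattern, assuming an asyncpg pool (the `fetch_rows` name is illustrative, not from this repository):

    import asyncpg

    async def fetch_rows(pool, query, *args, conn=None):
        # Reuse the caller's connection when one is given; otherwise
        # borrow a connection from the pool just for this call.
        if conn is None:
            async with pool.acquire() as connection:
                return await connection.fetch(query, *args)
        return await conn.fetch(query, *args)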
@@ -186,12 +194,11 @@ class CustomTimeframeGenerator:
                 is_complete=is_complete
             )

-            await self._upsert_candle(candle)
+            await self._upsert_candle(candle, conn)

-    async def _upsert_candle(self, c: CustomCandle) -> None:
-        """Upsert a single candle"""
-        async with self.db.acquire() as conn:
-            await conn.execute("""
+    async def _upsert_candle(self, c: CustomCandle, conn=None) -> None:
+        """Upsert a single candle using provided connection or acquiring a new one"""
+        query = """
             INSERT INTO candles (time, symbol, interval, open, high, low, close, volume, validated)
             VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9)
             ON CONFLICT (time, symbol, interval) DO UPDATE SET
@@ -202,12 +209,20 @@ class CustomTimeframeGenerator:
                 volume = EXCLUDED.volume,
                 validated = EXCLUDED.validated,
                 created_at = NOW()
-            """, c.time, c.symbol, c.interval, c.open, c.high, c.low, c.close, c.volume, c.is_complete)
+        """
+        values = (c.time, c.symbol, c.interval, c.open, c.high, c.low, c.close, c.volume, c.is_complete)
+
+        if conn is None:
+            async with self.db.acquire() as connection:
+                await connection.execute(query, *values)
+        else:
+            await conn.execute(query, *values)

     async def update_realtime(self, new_1m_candles: List[Candle]) -> None:
         """
         Update ALL timeframes (standard and custom) based on new 1m candles.
         Called after 1m buffer flush.
+        Uses a single connection for all updates sequentially to prevent pool exhaustion.
         """
         if not new_1m_candles:
             return
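Splitting the statement into `query` and `values` lets the same SQL run on either a pooled or a caller-supplied connection. A hypothetical caller (`gen` and `candles` are placeholder names, not from this repository) could now batch several upserts inside a single transaction:

    async def upsert_batch(gen, candles):
        # One connection and one transaction for the whole batch;
        # asyncpg's connection.transaction() is an async context manager.
        async with gen.db.acquire() as conn:
            async with conn.transaction():
                for candle in candles:
                    await gen._upsert_candle(candle, conn)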
@@ -215,21 +230,29 @@ class CustomTimeframeGenerator:
         if not self.first_1m_time:
             await self.initialize()

+        if not self.first_1m_time:
+            return

         symbol = new_1m_candles[0].symbol

-        # 1. Update all intervals except 1m and 148m
+        async with self.db.acquire() as conn:
+            # 1. Update all standard intervals + 37m sequentially
+            # sequential is required because we are sharing the same connection 'conn'
             intervals_to_update = list(self.STANDARD_INTERVALS.keys()) + ['37m']

-        tasks = []
             for interval in intervals_to_update:
+                try:
                     bucket_start = self.get_bucket_start(new_1m_candles[-1].time, interval)
-            tasks.append(self.aggregate_and_upsert(symbol, interval, bucket_start))
-        await asyncio.gather(*tasks)
+                    await self.aggregate_and_upsert(symbol, interval, bucket_start, conn=conn)
+                except Exception as e:
+                    logger.error(f"Error updating interval {interval}: {e}")

             # 2. Update 148m (it depends on 37m being updated first)
+            try:
                 bucket_148m = self.get_bucket_start(new_1m_candles[-1].time, '148m')
-            await self.aggregate_and_upsert(symbol, '148m', bucket_148m)
+                await self.aggregate_and_upsert(symbol, '148m', bucket_148m, conn=conn)
+            except Exception as e:
+                logger.error(f"Error updating interval 148m: {e}")

     async def generate_historical(self, interval: str, batch_size: int = 5000) -> int:
         """
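Dropping `asyncio.gather` here is not incidental: an asyncpg connection cannot execute queries concurrently, so once every update shares `conn`, the calls must be awaited one at a time. A small sketch of the failure mode (the DSN is a placeholder):

    import asyncio
    import asyncpg

    async def main():
        pool = await asyncpg.create_pool("postgresql://localhost/example", min_size=1, max_size=1)
        async with pool.acquire() as conn:
            try:
                # Two concurrent queries on ONE connection: asyncpg raises
                # InterfaceError ("another operation is in progress").
                await asyncio.gather(conn.fetch("SELECT 1"), conn.fetch("SELECT 2"))
            except asyncpg.InterfaceError as e:
                print(f"concurrent use of a single connection failed: {e}")
            # Sequential awaits on the same shared connection are safe:
            for query in ("SELECT 1", "SELECT 2"):
                await conn.fetch(query)
        await pool.close()

    asyncio.run(main())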