measurement tooltip + one week data from binance

2025-07-15 20:27:43 +02:00
parent 3843bfdff8
commit 68d8bc9880
4 changed files with 292 additions and 59 deletions

app.py

@@ -31,24 +31,43 @@ current_bar = {}  # To track the currently forming 1-minute candle
# --- Historical Data Streaming ---
def stream_historical_data(sid):
    """
    Fetches the last week of historical 1-minute kline data from Binance,
    saves it to a file, and sends it to the connected client.
    """
    try:
        logging.info(f"Starting historical data stream for SID={sid}")
        client = Client()
        num_chunks = 6
        chunk_size_days = 15
        end_date = datetime.utcnow()
        all_klines = []
        for i in range(num_chunks):
            start_date = end_date - timedelta(days=chunk_size_days)
            logging.info(f"Fetching chunk {i + 1}/{num_chunks} for SID={sid}")
            new_klines = client.get_historical_klines(SYMBOL, Client.KLINE_INTERVAL_1MINUTE, str(start_date), str(end_date))
            if new_klines:
                all_klines.extend(new_klines)
            socketio.emit('history_progress', {'progress': ((i + 1) / num_chunks) * 100}, to=sid)
            end_date = start_date
            socketio.sleep(0.05)
        # --- NEW SOLUTION: Load data for the last week ---
        logging.info(f"Fetching historical data for the last 7 days for SID={sid}")
        # The `python-binance` library allows using relative date strings.
        # This single call is more efficient for this use case.
        all_klines = client.get_historical_klines(
            SYMBOL,
            Client.KLINE_INTERVAL_1MINUTE,
            start_str="1 week ago UTC"  # Fetches data starting from 7 days ago until now
        )
        # --- ORIGINAL SOLUTION COMMENTED OUT ---
        # num_chunks = 6
        # chunk_size_days = 15
        # end_date = datetime.utcnow()
        # all_klines = []
        #
        # for i in range(num_chunks):
        #     start_date = end_date - timedelta(days=chunk_size_days)
        #     logging.info(f"Fetching chunk {i + 1}/{num_chunks} for SID={sid}")
        #     new_klines = client.get_historical_klines(SYMBOL, Client.KLINE_INTERVAL_1MINUTE, str(start_date), str(end_date))
        #     if new_klines:
        #         all_klines.extend(new_klines)
        #     # The progress emission is no longer needed for a single API call
        #     # socketio.emit('history_progress', {'progress': ((i + 1) / num_chunks) * 100}, to=sid)
        #     end_date = start_date
        #     socketio.sleep(0.05)
        # --- END OF ORIGINAL SOLUTION ---
        # The rest of the function processes the `all_klines` data as before
        seen = set()
        # De-duplicate while keeping chronological order: `seen.add()` returns None, so
        # `not seen.add(...)` records each new kline and keeps the filter expression truthy.
        unique_klines = [kline for kline in sorted(all_klines, key=lambda x: x[0]) if tuple(kline) not in seen and not seen.add(tuple(kline))]
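For context, here is a minimal standalone sketch of the one-week fetch and de-duplication used above. It is an illustration, not part of the commit: it assumes `python-binance` is installed and uses "BTCUSDT" as a stand-in for the SYMBOL constant defined elsewhere in app.py.

# Standalone sketch (assumption: `pip install python-binance`; "BTCUSDT" stands in for SYMBOL).
from binance.client import Client

client = Client()  # public kline endpoints work without API keys

klines = client.get_historical_klines(
    "BTCUSDT",
    Client.KLINE_INTERVAL_1MINUTE,
    start_str="1 week ago UTC",  # python-binance accepts relative date strings
)

# Same de-duplication idea as above, written out explicitly:
# sort by open time (index 0) and keep the first occurrence of each row.
seen = set()
unique_klines = []
for kline in sorted(klines, key=lambda k: k[0]):
    key = tuple(kline)
    if key not in seen:
        seen.add(key)
        unique_klines.append(kline)

print(f"Fetched {len(unique_klines)} unique 1-minute klines")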
@@ -57,6 +76,7 @@ def stream_historical_data(sid):
        logging.info(f"Finished data stream for SID={sid}. Sending final payload of {len(unique_klines)} klines.")
        socketio.emit('history_finished', {'klines_1m': unique_klines}, to=sid)
    except Exception as e:
        logging.error(f"Error in stream_historical_data for SID={sid}: {e}", exc_info=True)
        socketio.emit('history_error', {'message': str(e)}, to=sid)
@@ -123,4 +143,4 @@ def index():
# --- Main Application Execution ---
if __name__ == '__main__':
    logging.info("Starting Flask-SocketIO server...")
    socketio.run(app, host='0.0.0.0', port=5000, allow_unsafe_werkzeug=True, debug=False)
    socketio.run(app, host='0.0.0.0', port=5000, allow_unsafe_werkzeug=True, debug=False)
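For reference, a hedged sketch of a small test client for the events emitted above ('history_finished' and 'history_error'). It assumes the `python-socketio` client package and that the server starts streaming as soon as a client connects; the actual connect handler is elsewhere in app.py and not part of this diff.

# Hypothetical test client (assumption: `pip install "python-socketio[client]"`).
# Assumes the server kicks off stream_historical_data when a client connects,
# which is wired up outside the hunks shown in this commit.
import socketio

sio = socketio.Client()

@sio.on('history_finished')
def on_history_finished(data):
    print(f"Received {len(data['klines_1m'])} 1-minute klines")
    sio.disconnect()

@sio.on('history_error')
def on_history_error(data):
    print(f"Server error: {data['message']}")
    sio.disconnect()

sio.connect('http://localhost:5000')
sio.wait()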