From 2eef7dbc170220d13efe400279df802b2025d67f Mon Sep 17 00:00:00 2001 From: DiTus Date: Tue, 21 Oct 2025 15:09:14 +0200 Subject: [PATCH 01/18] live market web socket --- live_candle_fetcher.py | 238 +++++++++++++++++++++++++++++++++++++++++ main_app.py | 201 +++++++++++++--------------------- 2 files changed, 310 insertions(+), 129 deletions(-) create mode 100644 live_candle_fetcher.py diff --git a/live_candle_fetcher.py b/live_candle_fetcher.py new file mode 100644 index 0000000..b8bd7b5 --- /dev/null +++ b/live_candle_fetcher.py @@ -0,0 +1,238 @@ +import argparse +import logging +import os +import sys +import json +import time +from datetime import datetime, timezone +from hyperliquid.info import Info +from hyperliquid.utils import constants +import sqlite3 +from queue import Queue +from threading import Thread + +from logging_utils import setup_logging + +class LiveCandleFetcher: + """ + Connects to Hyperliquid to maintain a complete and up-to-date database of + 1-minute candles using a robust producer-consumer architecture to prevent + data corruption and duplication. + """ + + def __init__(self, log_level: str, coins: list): + setup_logging(log_level, 'LiveCandleFetcher') + self.db_path = os.path.join("_data", "market_data.db") + self.coins_to_watch = set(coins) + if not self.coins_to_watch: + logging.error("No coins provided to watch. Exiting.") + sys.exit(1) + + self.info = Info(constants.MAINNET_API_URL, skip_ws=False) + self.candle_queue = Queue() # Thread-safe queue for candles + self._ensure_tables_exist() + + def _ensure_tables_exist(self): + """ + Ensures that all necessary tables are created with the correct schema and PRIMARY KEY. + If a table exists with an incorrect schema, it attempts to migrate the data. + """ + with sqlite3.connect(self.db_path) as conn: + for coin in self.coins_to_watch: + table_name = f"{coin}_1m" + cursor = conn.cursor() + cursor.execute(f"PRAGMA table_info('{table_name}')") + columns = cursor.fetchall() + + if columns: + pk_found = any(col[1] == 'timestamp_ms' and col[5] == 1 for col in columns) + if not pk_found: + logging.warning(f"Schema migration needed for table '{table_name}': 'timestamp_ms' is not the PRIMARY KEY.") + logging.warning("Attempting to automatically rebuild the table...") + try: + # 1. Rename old table + conn.execute(f'ALTER TABLE "{table_name}" RENAME TO "{table_name}_old"') + logging.info(f" -> Renamed existing table to '{table_name}_old'.") + + # 2. Create new table with correct schema + self._create_candle_table(conn, table_name) + logging.info(f" -> Created new '{table_name}' table with correct schema.") + + # 3. Copy unique data from old table to new table + conn.execute(f''' + INSERT OR IGNORE INTO "{table_name}" (datetime_utc, timestamp_ms, open, high, low, close, volume, number_of_trades) + SELECT datetime_utc, timestamp_ms, open, high, low, close, volume, number_of_trades + FROM "{table_name}_old" + ''') + conn.commit() + logging.info(" -> Copied data to new table.") + + # 4. Drop the old table + conn.execute(f'DROP TABLE "{table_name}_old"') + logging.info(f" -> Removed old table. 
Migration for '{table_name}' complete.") + except Exception as e: + logging.error(f"FATAL: Automatic schema migration for '{table_name}' failed: {e}") + logging.error("Please delete the database file '_data/market_data.db' manually and restart.") + sys.exit(1) + else: + # If table does not exist, create it + self._create_candle_table(conn, table_name) + logging.info("Database tables verified.") + + def _create_candle_table(self, conn, table_name: str): + """Creates a new candle table with the correct schema.""" + conn.execute(f''' + CREATE TABLE "{table_name}" ( + datetime_utc TEXT, + timestamp_ms INTEGER PRIMARY KEY, + open REAL, + high REAL, + low REAL, + close REAL, + volume REAL, + number_of_trades INTEGER + ) + ''') + + def on_message(self, message): + """ + Callback function to process incoming candle messages. This is the "Producer". + It puts the raw message onto the queue for the DB writer. + """ + try: + if message.get("channel") == "candle": + candle_data = message.get("data", {}) + if candle_data: + self.candle_queue.put(candle_data) + except Exception as e: + logging.error(f"Error in on_message: {e}") + + def _database_writer_thread(self): + """ + This is the "Consumer" thread. It runs forever, pulling candles from the + queue and writing them to the database, ensuring all writes are serial. + """ + while True: + try: + candle = self.candle_queue.get() + if candle is None: # A signal to stop the thread + break + + coin = candle.get('coin') + if not coin: + continue + + table_name = f"{coin}_1m" + record = ( + datetime.fromtimestamp(candle['t'] / 1000, tz=timezone.utc).strftime('%Y-%m-%d %H:%M:%S'), + candle['t'], + candle.get('o'), candle.get('h'), candle.get('l'), candle.get('c'), + candle.get('v'), candle.get('n') + ) + + with sqlite3.connect(self.db_path) as conn: + conn.execute(f''' + INSERT OR REPLACE INTO "{table_name}" (datetime_utc, timestamp_ms, open, high, low, close, volume, number_of_trades) + VALUES (?, ?, ?, ?, ?, ?, ?, ?) 
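                        -- timestamp_ms is the PRIMARY KEY (see _create_candle_table), so a
                        -- candle delivered twice simply overwrites its earlier row instead of duplicating it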
+ ''', record) + conn.commit() + logging.debug(f"Upserted candle for {coin} at {record[0]}") + + except Exception as e: + logging.error(f"Error in database writer thread: {e}") + + def _get_last_timestamp_from_db(self, coin: str) -> int: + """Gets the most recent millisecond timestamp from a coin's 1m table.""" + table_name = f"{coin}_1m" + try: + with sqlite3.connect(self.db_path) as conn: + result = conn.execute(f'SELECT MAX(timestamp_ms) FROM "{table_name}"').fetchone() + return int(result[0]) if result and result[0] is not None else None + except Exception as e: + logging.error(f"Could not read last timestamp from table '{table_name}': {e}") + return None + + def _fetch_historical_candles(self, coin: str, start_ms: int, end_ms: int): + """Fetches historical candles and puts them on the queue for the writer.""" + logging.info(f"Fetching historical data for {coin} from {datetime.fromtimestamp(start_ms/1000)}...") + current_start = start_ms + + while current_start < end_ms: + try: + http_info = Info(constants.MAINNET_API_URL, skip_ws=True) + batch = http_info.candles_snapshot(coin, "1m", current_start, end_ms) + if not batch: + break + + for candle in batch: + candle['coin'] = coin + self.candle_queue.put(candle) + + last_ts = batch[-1]['t'] + if last_ts < current_start: + break + current_start = last_ts + 1 + time.sleep(0.5) + except Exception as e: + logging.error(f"Error fetching historical chunk for {coin}: {e}") + break + + logging.info(f"Historical data fetching for {coin} is complete.") + + def run(self): + """ + Starts the database writer, catches up on historical data, then + subscribes to the WebSocket for live updates. + """ + db_writer = Thread(target=self._database_writer_thread, daemon=True) + db_writer.start() + + logging.info("--- Starting Historical Data Catch-Up Phase ---") + now_ms = int(time.time() * 1000) + for coin in self.coins_to_watch: + last_ts = self._get_last_timestamp_from_db(coin) + start_ts = last_ts + 60000 if last_ts else now_ms - (7 * 24 * 60 * 60 * 1000) + if start_ts < now_ms: + self._fetch_historical_candles(coin, start_ts, now_ms) + + logging.info("--- Historical Catch-Up Complete. Starting Live WebSocket Feed ---") + for coin in self.coins_to_watch: + # --- FIX: Use a lambda to create a unique callback for each subscription --- + # This captures the 'coin' variable and adds it to the message data. + callback = lambda msg, c=coin: self.on_message({**msg, 'data': {**msg.get('data',{}), 'coin': c}}) + subscription = {"type": "candle", "coin": coin, "interval": "1m"} + self.info.subscribe(subscription, callback) + logging.info(f"Subscribed to 1m candles for {coin}") + time.sleep(0.2) + + print("\nListening for live candle data... Press Ctrl+C to stop.") + try: + while True: + time.sleep(1) + except KeyboardInterrupt: + print("\nStopping WebSocket listener...") + self.info.ws_manager.stop() + self.candle_queue.put(None) + db_writer.join() + print("Listener stopped.") + + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="A hybrid historical and live candle data fetcher for Hyperliquid.") + parser.add_argument( + "--coins", + nargs='+', + required=True, + help="List of coin symbols to fetch (e.g., BTC ETH)." + ) + parser.add_argument( + "--log-level", + default="normal", + choices=['off', 'normal', 'debug'], + help="Set the logging level for the script." 
+ ) + args = parser.parse_args() + + fetcher = LiveCandleFetcher(log_level=args.log_level, coins=args.coins) + fetcher.run() + diff --git a/main_app.py b/main_app.py index 47670b4..a53256b 100644 --- a/main_app.py +++ b/main_app.py @@ -11,18 +11,18 @@ import pandas as pd from datetime import datetime, timezone from logging_utils import setup_logging +# --- Using the high-performance WebSocket utility for live prices --- +from live_market_utils import start_live_feed # --- Configuration --- WATCHED_COINS = ["BTC", "ETH", "SOL", "BNB", "HYPE", "ASTER", "ZEC", "PUMP", "SUI"] -COIN_LISTER_SCRIPT = "list_coins.py" -MARKET_FEEDER_SCRIPT = "market.py" -DATA_FETCHER_SCRIPT = "data_fetcher.py" +# --- FIX: Replaced old data_fetcher with the new live_candle_fetcher --- +LIVE_CANDLE_FETCHER_SCRIPT = "live_candle_fetcher.py" RESAMPLER_SCRIPT = "resampler.py" MARKET_CAP_FETCHER_SCRIPT = "market_cap_fetcher.py" +TRADE_EXECUTOR_SCRIPT = "trade_executor.py" STRATEGY_CONFIG_FILE = os.path.join("_data", "strategies.json") -PRICE_DATA_FILE = os.path.join("_data", "current_prices.json") DB_PATH = os.path.join("_data", "market_data.db") -STATUS_FILE = os.path.join("_data", "fetcher_status.json") MARKET_CAP_SUMMARY_FILE = os.path.join("_data", "market_cap_data.json") LOGS_DIR = "_logs" TRADE_EXECUTOR_STATUS_FILE = os.path.join(LOGS_DIR, "trade_executor_status.json") @@ -41,47 +41,22 @@ def format_market_cap(mc_value): return f"${mc_value:,.2f}" -def run_market_feeder(): - """Target function to run market.py and redirect its output to a log file.""" - log_file = os.path.join(LOGS_DIR, "market_feeder.log") +def run_live_candle_fetcher(): + """Target function to run the live_candle_fetcher.py script in a resilient loop.""" + log_file = os.path.join(LOGS_DIR, "live_candle_fetcher.log") while True: try: with open(log_file, 'a') as f: - subprocess.run( - [sys.executable, MARKET_FEEDER_SCRIPT, "--log-level", "off"], - check=True, stdout=f, stderr=subprocess.STDOUT - ) + command = [sys.executable, LIVE_CANDLE_FETCHER_SCRIPT, "--coins"] + WATCHED_COINS + ["--log-level", "off"] + f.write(f"\n--- Starting {LIVE_CANDLE_FETCHER_SCRIPT} at {datetime.now()} ---\n") + subprocess.run(command, check=True, stdout=f, stderr=subprocess.STDOUT) except (subprocess.CalledProcessError, Exception) as e: with open(log_file, 'a') as f: f.write(f"\n--- PROCESS ERROR at {datetime.now()} ---\n") - f.write(f"Market feeder script failed: {e}. Restarting...\n") + f.write(f"Live candle fetcher failed: {e}. 
Restarting...\n") time.sleep(5) -def run_data_fetcher_job(): - """Defines the job for the data fetcher, redirecting output to a log file.""" - log_file = os.path.join(LOGS_DIR, "data_fetcher.log") - try: - command = [sys.executable, DATA_FETCHER_SCRIPT, "--coins"] + WATCHED_COINS + ["--days", "7", "--log-level", "off"] - with open(log_file, 'a') as f: - f.write(f"\n--- Starting data_fetcher.py job at {datetime.now()} ---\n") - subprocess.run(command, check=True, stdout=f, stderr=subprocess.STDOUT) - except Exception as e: - with open(log_file, 'a') as f: - f.write(f"\n--- SCHEDULER ERROR at {datetime.now()} ---\n") - f.write(f"Failed to run data_fetcher.py job: {e}\n") - - -def data_fetcher_scheduler(): - """Schedules the data_fetcher.py script.""" - setup_logging('off', 'DataFetcherScheduler') - run_data_fetcher_job() - schedule.every(1).minutes.do(run_data_fetcher_job) - while True: - schedule.run_pending() - time.sleep(1) - - def run_resampler_job(timeframes_to_generate: list): """Defines the job for the resampler, redirecting output to a log file.""" log_file = os.path.join(LOGS_DIR, "resampler.log") @@ -133,8 +108,7 @@ def run_strategy(strategy_name: str, config: dict): """Target function to run a strategy, redirecting its output to a log file.""" log_file = os.path.join(LOGS_DIR, f"strategy_{strategy_name}.log") script_name = config['script'] - params_str = json.dumps(config['parameters']) - command = [sys.executable, script_name, "--name", strategy_name, "--params", params_str, "--log-level", "normal"] + command = [sys.executable, script_name, "--name", strategy_name, "--log-level", "normal"] while True: try: with open(log_file, 'a') as f: @@ -146,13 +120,27 @@ def run_strategy(strategy_name: str, config: dict): f.write(f"Strategy '{strategy_name}' failed: {e}. Restarting...\n") time.sleep(10) +def run_trade_executor(): + """Target function to run the trade_executor.py script in a resilient loop.""" + log_file = os.path.join(LOGS_DIR, "trade_executor.log") + while True: + try: + with open(log_file, 'a') as f: + f.write(f"\n--- Starting Trade Executor at {datetime.now()} ---\n") + subprocess.run([sys.executable, TRADE_EXECUTOR_SCRIPT, "--log-level", "normal"], check=True, stdout=f, stderr=subprocess.STDOUT) + except (subprocess.CalledProcessError, Exception) as e: + with open(log_file, 'a') as f: + f.write(f"\n--- PROCESS ERROR at {datetime.now()} ---\n") + f.write(f"Trade Executor failed: {e}. Restarting...\n") + time.sleep(10) + class MainApp: - def __init__(self, coins_to_watch: list, processes: dict, strategy_configs: dict): + def __init__(self, coins_to_watch: list, processes: dict, strategy_configs: dict, shared_prices: dict): self.watched_coins = coins_to_watch + self.shared_prices = shared_prices self.prices = {} self.market_caps = {} - self.last_db_update_info = "Initializing..." 
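+        # Live prices now come from the shared Manager dict fed by
+        # live_market_utils.start_live_feed, replacing the old current_prices.json file.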
self.open_positions = {} self.background_processes = processes self.process_status = {} @@ -160,21 +148,17 @@ class MainApp: self.strategy_statuses = {} def read_prices(self): - """Reads the latest prices from the JSON file.""" - if os.path.exists(PRICE_DATA_FILE): - try: - with open(PRICE_DATA_FILE, 'r', encoding='utf-8') as f: - self.prices = json.load(f) - except (json.JSONDecodeError, IOError): - logging.debug("Could not read price file.") + """Reads the latest prices directly from the shared memory dictionary.""" + try: + self.prices = dict(self.shared_prices) + except Exception as e: + logging.debug(f"Could not read from shared prices dict: {e}") def read_market_caps(self): - """Reads the latest market cap summary from its JSON file.""" if os.path.exists(MARKET_CAP_SUMMARY_FILE): try: with open(MARKET_CAP_SUMMARY_FILE, 'r', encoding='utf-8') as f: summary_data = json.load(f) - for coin in self.watched_coins: table_key = f"{coin}_market_cap" if table_key in summary_data: @@ -183,7 +167,6 @@ class MainApp: logging.debug("Could not read market cap summary file.") def read_strategy_statuses(self): - """Reads the status JSON file for each enabled strategy.""" enabled_statuses = {} for name, config in self.strategy_configs.items(): if config.get("enabled", False): @@ -199,7 +182,6 @@ class MainApp: self.strategy_statuses = enabled_statuses def read_executor_status(self): - """Reads the live status file from the trade executor.""" if os.path.exists(TRADE_EXECUTOR_STATUS_FILE): try: with open(TRADE_EXECUTOR_STATUS_FILE, 'r', encoding='utf-8') as f: @@ -209,43 +191,15 @@ class MainApp: else: self.open_positions = {} - - def get_overall_db_status(self): - """Reads the fetcher status from the status file.""" - if os.path.exists(STATUS_FILE): - try: - with open(STATUS_FILE, 'r', encoding='utf-8') as f: - status = json.load(f) - coin = status.get("last_updated_coin") - timestamp_utc_str = status.get("last_run_timestamp_utc") - num_candles = status.get("num_updated_candles", 0) - if timestamp_utc_str: - dt_utc = datetime.fromisoformat(timestamp_utc_str.replace('Z', '+00:00')).replace(tzinfo=timezone.utc) - dt_local = dt_utc.astimezone(None) - - offset = dt_local.utcoffset() - offset_hours = int(offset.total_seconds() / 3600) - sign = '+' if offset_hours >= 0 else '' - offset_str = f"UTC{sign}{offset_hours}" - timestamp_display = f"{dt_local.strftime('%Y-%m-%d %H:%M:%S')} {offset_str}" - else: - timestamp_display = "N/A" - self.last_db_update_info = f"{coin} at {timestamp_display} | {num_candles} candles" - except (IOError, json.JSONDecodeError): - self.last_db_update_info = "Error reading status file." 
- def check_process_status(self): - """Checks if the background processes are still running.""" for name, process in self.background_processes.items(): self.process_status[name] = "Running" if process.is_alive() else "STOPPED" def display_dashboard(self): - """Displays a formatted dashboard with side-by-side tables.""" - print("\x1b[H\x1b[J", end="") # Clear screen + print("\x1b[H\x1b[J", end="") - left_table_lines = [] + left_table_lines = ["--- Market Dashboard ---"] left_table_width = 44 - left_table_lines.append("--- Market Dashboard ---") left_table_lines.append("-" * left_table_width) left_table_lines.append(f"{'#':<2} | {'Coin':^6} | {'Live Price':>10} | {'Market Cap':>15} |") left_table_lines.append("-" * left_table_width) @@ -256,9 +210,8 @@ class MainApp: left_table_lines.append(f"{i:<2} | {coin:^6} | {price:>10} | {formatted_mc:>15} |") left_table_lines.append("-" * left_table_width) - right_table_lines = [] + right_table_lines = ["--- Strategy Status ---"] right_table_width = 154 - right_table_lines.append("--- Strategy Status ---") right_table_lines.append("-" * right_table_width) right_table_lines.append(f"{'#':^2} | {'Strategy Name':<25} | {'Coin':^6} | {'Signal':^8} | {'Signal Price':>12} | {'Last Change':>17} | {'TF':^5} | {'Size':^8} | {'Parameters':<45} |") right_table_lines.append("-" * right_table_width) @@ -280,7 +233,6 @@ class MainApp: other_params = {k: v for k, v in config_params.items() if k not in ['coin', 'timeframe', 'size']} params_str = ", ".join([f"{k}={v}" for k, v in other_params.items()]) - right_table_lines.append(f"{i:^2} | {name:<25} | {coin:^6} | {signal:^8} | {price_display:>12} | {last_change_display:>17} | {timeframe:^5} | {size:>8} | {params_str:<45} |") right_table_lines.append("-" * right_table_width) @@ -292,8 +244,6 @@ class MainApp: left_part = left_table_lines[i] if i < len(left_table_lines) else " " * left_table_width right_part = indent + right_table_lines[i] if i < len(right_table_lines) else "" output_lines.append(f"{left_part}{separator}{right_part}") - - output_lines.append(f"\nDB Status: Last update -> {self.last_db_update_info}") output_lines.append("\n--- Open Positions ---") pos_table_width = 100 @@ -308,7 +258,6 @@ class MainApp: output_lines.append("No open positions found.") else: for pos in perps_positions: - # --- FIX: Safely handle potentially None values before formatting --- try: pnl = float(pos.get('pnl', 0.0)) pnl_str = f"${pnl:,.2f}" @@ -343,27 +292,18 @@ class MainApp: while True: self.read_prices() self.read_market_caps() - self.get_overall_db_status() self.read_strategy_statuses() self.read_executor_status() self.check_process_status() self.display_dashboard() - time.sleep(2) - + time.sleep(0.5) if __name__ == "__main__": setup_logging('normal', 'MainApp') if not os.path.exists(LOGS_DIR): os.makedirs(LOGS_DIR) - - logging.info(f"Running coin lister: '{COIN_LISTER_SCRIPT}'...") - try: - subprocess.run([sys.executable, COIN_LISTER_SCRIPT], check=True, capture_output=True, text=True) - except subprocess.CalledProcessError as e: - logging.error(f"Failed to run '{COIN_LISTER_SCRIPT}'. Error: {e.stderr}") - sys.exit(1) - + processes = {} strategy_configs = {} @@ -382,37 +322,40 @@ if __name__ == "__main__": required_timeframes.add(tf) if not required_timeframes: - logging.warning("No timeframes required by any enabled strategy. 
Resampler will not run effectively.") + logging.warning("No timeframes required by any enabled strategy.") - - processes["Market Feeder"] = multiprocessing.Process(target=run_market_feeder, daemon=True) - processes["Data Fetcher"] = multiprocessing.Process(target=data_fetcher_scheduler, daemon=True) - processes["Resampler"] = multiprocessing.Process(target=resampler_scheduler, args=(list(required_timeframes),), daemon=True) - processes["Market Cap Fetcher"] = multiprocessing.Process(target=market_cap_fetcher_scheduler, daemon=True) - - for name, config in strategy_configs.items(): - if config.get("enabled", False): - if not os.path.exists(config['script']): - logging.error(f"Strategy script '{config['script']}' for strategy '{name}' not found. Skipping.") - continue - proc = multiprocessing.Process(target=run_strategy, args=(name, config), daemon=True) - processes[f"Strategy: {name}"] = proc + with multiprocessing.Manager() as manager: + shared_prices = manager.dict() - for name, proc in processes.items(): - logging.info(f"Starting process '{name}'...") - proc.start() - - time.sleep(3) + processes["Live Market Feed"] = multiprocessing.Process(target=start_live_feed, args=(shared_prices, 'off'), daemon=True) + processes["Live Candle Fetcher"] = multiprocessing.Process(target=run_live_candle_fetcher, daemon=True) + processes["Resampler"] = multiprocessing.Process(target=resampler_scheduler, args=(list(required_timeframes),), daemon=True) + processes["Market Cap Fetcher"] = multiprocessing.Process(target=market_cap_fetcher_scheduler, daemon=True) + processes["Trade Executor"] = multiprocessing.Process(target=run_trade_executor, daemon=True) + + for name, config in strategy_configs.items(): + if config.get("enabled", False): + if not os.path.exists(config['script']): + logging.error(f"Strategy script '{config['script']}' for '{name}' not found. 
Skipping.") + continue + proc = multiprocessing.Process(target=run_strategy, args=(name, config), daemon=True) + processes[f"Strategy: {name}"] = proc - app = MainApp(coins_to_watch=WATCHED_COINS, processes=processes, strategy_configs=strategy_configs) - try: - app.run() - except KeyboardInterrupt: - logging.info("Shutting down...") - for proc in processes.values(): - if proc.is_alive(): proc.terminate() - for proc in processes.values(): - if proc.is_alive(): proc.join() - logging.info("Shutdown complete.") - sys.exit(0) + for name, proc in processes.items(): + logging.info(f"Starting process '{name}'...") + proc.start() + + time.sleep(3) + + app = MainApp(coins_to_watch=WATCHED_COINS, processes=processes, strategy_configs=strategy_configs, shared_prices=shared_prices) + try: + app.run() + except KeyboardInterrupt: + logging.info("Shutting down...") + for proc in processes.values(): + if proc.is_alive(): proc.terminate() + for proc in processes.values(): + if proc.is_alive(): proc.join() + logging.info("Shutdown complete.") + sys.exit(0) From cac440586625248e2abede7fe382144127b39caf Mon Sep 17 00:00:00 2001 From: DiTus Date: Tue, 21 Oct 2025 15:09:53 +0200 Subject: [PATCH 02/18] live market --- live_market_utils.py | 49 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 49 insertions(+) create mode 100644 live_market_utils.py diff --git a/live_market_utils.py b/live_market_utils.py new file mode 100644 index 0000000..bc5246b --- /dev/null +++ b/live_market_utils.py @@ -0,0 +1,49 @@ +import logging +import json +import time +from hyperliquid.info import Info +from hyperliquid.utils import constants + +from logging_utils import setup_logging + +def on_message(message, shared_prices_dict): + """ + Callback function to process incoming 'allMids' messages and update the + shared memory dictionary directly. + """ + try: + if message.get("channel") == "allMids": + new_prices = message.get("data", {}).get("mids", {}) + # Update the shared dictionary with the new price data + shared_prices_dict.update(new_prices) + except Exception as e: + # It's important to log errors inside the process + logging.error(f"Error in WebSocket on_message: {e}") + +def start_live_feed(shared_prices_dict, log_level='off'): + """ + Main function for the WebSocket process. It takes a shared dictionary + and continuously feeds it with live market data. + """ + setup_logging(log_level, 'LiveMarketFeed') + + # The Info object manages the WebSocket connection. + info = Info(constants.MAINNET_API_URL, skip_ws=False) + + # We need to wrap the callback in a lambda to pass our shared dictionary + callback = lambda msg: on_message(msg, shared_prices_dict) + + # Subscribe to the allMids channel + subscription = {"type": "allMids"} + info.subscribe(subscription, callback) + logging.info("Subscribed to 'allMids' for live mark prices.") + + logging.info("Starting live price feed process. Press Ctrl+C in main app to stop.") + try: + # The background thread in the SDK handles messages. This loop just keeps the process alive. 
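+        # info.subscribe() already registered our callback on the SDK's WebSocket
+        # thread, so nothing is polled here; the sleep below only stops this
+        # process from exiting and tearing the connection down.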
+ while True: + time.sleep(1) + except KeyboardInterrupt: + logging.info("Stopping WebSocket listener...") + info.ws_manager.stop() + logging.info("Listener stopped.") From 5a05f0d190a22afe151c2f8e3920747e8d7f2b44 Mon Sep 17 00:00:00 2001 From: DiTus Date: Tue, 21 Oct 2025 23:07:07 +0200 Subject: [PATCH 03/18] resampler much faster --- main_app.py | 32 ++++++------ resampler.py | 138 +++++++++++++++++++++++++++++++++++++++++---------- 2 files changed, 125 insertions(+), 45 deletions(-) diff --git a/main_app.py b/main_app.py index a53256b..aca61fb 100644 --- a/main_app.py +++ b/main_app.py @@ -16,7 +16,6 @@ from live_market_utils import start_live_feed # --- Configuration --- WATCHED_COINS = ["BTC", "ETH", "SOL", "BNB", "HYPE", "ASTER", "ZEC", "PUMP", "SUI"] -# --- FIX: Replaced old data_fetcher with the new live_candle_fetcher --- LIVE_CANDLE_FETCHER_SCRIPT = "live_candle_fetcher.py" RESAMPLER_SCRIPT = "resampler.py" MARKET_CAP_FETCHER_SCRIPT = "market_cap_fetcher.py" @@ -27,6 +26,9 @@ MARKET_CAP_SUMMARY_FILE = os.path.join("_data", "market_cap_data.json") LOGS_DIR = "_logs" TRADE_EXECUTOR_STATUS_FILE = os.path.join(LOGS_DIR, "trade_executor_status.json") +# --- ADDED: Standard list of timeframes for the resampler to generate --- +STANDARD_RESAMPLING_TIMEFRAMES = ["3m", "5m", "15m", "30m", "37m", "148m", "1h", "2h", "4h", "8h", "12h", "1d", "3d", "1w", "1M"] + def format_market_cap(mc_value): """Formats a large number into a human-readable market cap string.""" @@ -61,7 +63,7 @@ def run_resampler_job(timeframes_to_generate: list): """Defines the job for the resampler, redirecting output to a log file.""" log_file = os.path.join(LOGS_DIR, "resampler.log") try: - command = [sys.executable, RESAMPLER_SCRIPT, "--coins"] + WATCHED_COINS + ["--timeframes"] + timeframes_to_generate + ["--log-level", "off"] + command = [sys.executable, RESAMPLER_SCRIPT, "--coins"] + WATCHED_COINS + ["--timeframes"] + timeframes_to_generate + ["--log-level", "normal"] with open(log_file, 'a') as f: f.write(f"\n--- Starting resampler.py job at {datetime.now()} ---\n") subprocess.run(command, check=True, stdout=f, stderr=subprocess.STDOUT) @@ -71,14 +73,17 @@ def run_resampler_job(timeframes_to_generate: list): f.write(f"Failed to run resampler.py job: {e}\n") -def resampler_scheduler(timeframes_to_generate: list): - """Schedules the resampler.py script.""" +def resampler_scheduler(): + """Schedules the resampler.py script to run at the start of every minute.""" setup_logging('off', 'ResamplerScheduler') - run_resampler_job(timeframes_to_generate) - schedule.every(4).minutes.do(run_resampler_job, timeframes_to_generate) + # Run once at startup + run_resampler_job(STANDARD_RESAMPLING_TIMEFRAMES) + # Schedule to run every minute at the :01 second mark + schedule.every().minute.at(":01").do(run_resampler_job, timeframes_to_generate=STANDARD_RESAMPLING_TIMEFRAMES) + logging.info("Resampler scheduled to run every minute at :01.") while True: schedule.run_pending() - time.sleep(1) + time.sleep(1) # Check every second to not miss the scheduled time def run_market_cap_fetcher_job(): @@ -314,22 +319,13 @@ if __name__ == "__main__": logging.error(f"Could not load strategies from '{STRATEGY_CONFIG_FILE}': {e}") sys.exit(1) - required_timeframes = set() - for name, config in strategy_configs.items(): - if config.get("enabled", False): - tf = config.get("parameters", {}).get("timeframe") - if tf: - required_timeframes.add(tf) - - if not required_timeframes: - logging.warning("No timeframes required by any enabled strategy.") - 
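+    # Everything below runs inside the Manager context so the shared_prices
+    # proxy stays valid for the lifetime of every child process.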
with multiprocessing.Manager() as manager: shared_prices = manager.dict() processes["Live Market Feed"] = multiprocessing.Process(target=start_live_feed, args=(shared_prices, 'off'), daemon=True) processes["Live Candle Fetcher"] = multiprocessing.Process(target=run_live_candle_fetcher, daemon=True) - processes["Resampler"] = multiprocessing.Process(target=resampler_scheduler, args=(list(required_timeframes),), daemon=True) + # --- FIX: The resampler now uses a fixed list of TFs and a new schedule --- + processes["Resampler"] = multiprocessing.Process(target=resampler_scheduler, daemon=True) processes["Market Cap Fetcher"] = multiprocessing.Process(target=market_cap_fetcher_scheduler, daemon=True) processes["Trade Executor"] = multiprocessing.Process(target=run_trade_executor, daemon=True) diff --git a/resampler.py b/resampler.py index 09fec2d..ea489b9 100644 --- a/resampler.py +++ b/resampler.py @@ -5,7 +5,7 @@ import sys import sqlite3 import pandas as pd import json -from datetime import datetime, timezone +from datetime import datetime, timezone, timedelta # Assuming logging_utils.py is in the same directory from logging_utils import setup_logging @@ -13,7 +13,8 @@ from logging_utils import setup_logging class Resampler: """ Reads new 1-minute candle data from the SQLite database, resamples it to - various timeframes, and appends the new candles to the corresponding tables. + various timeframes, and upserts the new candles to the corresponding tables, + preventing data duplication. """ def __init__(self, log_level: str, coins: list, timeframes: dict): @@ -32,6 +33,51 @@ class Resampler: } self.resampling_status = self._load_existing_status() self.job_start_time = None + self._ensure_tables_exist() + + def _ensure_tables_exist(self): + """ + Ensures all resampled tables exist with a PRIMARY KEY on datetime_utc. + Attempts to migrate existing tables if the schema is incorrect. + """ + with sqlite3.connect(self.db_path) as conn: + for coin in self.coins_to_process: + for tf_name in self.timeframes.keys(): + table_name = f"{coin}_{tf_name}" + cursor = conn.cursor() + cursor.execute(f"PRAGMA table_info('{table_name}')") + columns = cursor.fetchall() + if columns: + pk_found = any(col[1] == 'datetime_utc' and col[5] == 1 for col in columns) + if not pk_found: + logging.warning(f"Schema migration needed for table '{table_name}'.") + try: + conn.execute(f'ALTER TABLE "{table_name}" RENAME TO "{table_name}_old"') + self._create_resampled_table(conn, table_name) + conn.execute(f'INSERT OR IGNORE INTO "{table_name}" SELECT * FROM "{table_name}_old"') + conn.execute(f'DROP TABLE "{table_name}_old"') + conn.commit() + logging.info(f"Successfully migrated schema for '{table_name}'.") + except Exception as e: + logging.error(f"FATAL: Migration for '{table_name}' failed: {e}. 
Please delete 'market_data.db' and restart.") + sys.exit(1) + else: + self._create_resampled_table(conn, table_name) + logging.info("All resampled table schemas verified.") + + def _create_resampled_table(self, conn, table_name): + """Creates a new resampled table with the correct schema.""" + conn.execute(f''' + CREATE TABLE "{table_name}" ( + datetime_utc TEXT PRIMARY KEY, + open REAL, + high REAL, + low REAL, + close REAL, + volume REAL, + number_of_trades INTEGER + ) + ''') def _load_existing_status(self) -> dict: """Loads the existing status file if it exists, otherwise returns an empty dict.""" @@ -51,6 +97,14 @@ class Resampler: self.job_start_time = datetime.now(timezone.utc) logging.info(f"--- Resampling job started at {self.job_start_time.strftime('%Y-%m-%d %H:%M:%S %Z')} ---") + if '1m' in self.timeframes: + logging.debug("Ignoring '1m' timeframe as it is the source resolution.") + del self.timeframes['1m'] + + if not self.timeframes: + logging.warning("No timeframes to process after filtering. Exiting job.") + return + if not os.path.exists(self.db_path): logging.error(f"Database file '{self.db_path}' not found.") return @@ -61,37 +115,58 @@ class Resampler: logging.debug(f"Processing {len(self.coins_to_process)} coins...") for coin in self.coins_to_process: - source_table_name = f"{coin}_1m" logging.debug(f"--- Processing {coin} ---") try: - # Load the full 1m history once per coin - df_1m = pd.read_sql(f'SELECT * FROM "{source_table_name}"', conn, parse_dates=['datetime_utc']) - if df_1m.empty: - logging.warning(f"Source table '{source_table_name}' is empty. Skipping.") - continue - df_1m.set_index('datetime_utc', inplace=True) - for tf_name, tf_code in self.timeframes.items(): target_table_name = f"{coin}_{tf_name}" + source_table_name = f"{coin}_1m" logging.debug(f" Updating {tf_name} table...") last_timestamp = self._get_last_timestamp(conn, target_table_name) - # Get the new 1-minute data that needs to be processed - new_df_1m = df_1m[df_1m.index > last_timestamp] if last_timestamp else df_1m + query = f'SELECT * FROM "{source_table_name}"' + params = () + if last_timestamp: + query += ' WHERE datetime_utc >= ?' + try: + # --- FIX: Try the fast method first --- + interval_delta = pd.to_timedelta(tf_code) + query_start_date = last_timestamp - interval_delta + except ValueError: + # --- FIX: Fall back to the safe method for special timeframes --- + logging.debug(f"Cannot create timedelta for '{tf_code}'. Using safe 32-day lookback.") + query_start_date = last_timestamp - timedelta(days=32) + + params = (query_start_date.strftime('%Y-%m-%d %H:%M:%S'),) - if new_df_1m.empty: + df_1m = pd.read_sql(query, conn, params=params, parse_dates=['datetime_utc']) + + if df_1m.empty: logging.debug(f" -> No new 1-minute data for {tf_name}. 
Table is up to date.") continue - resampled_df = new_df_1m.resample(tf_code).agg(self.aggregation_logic) + df_1m.set_index('datetime_utc', inplace=True) + resampled_df = df_1m.resample(tf_code).agg(self.aggregation_logic) resampled_df.dropna(how='all', inplace=True) if not resampled_df.empty: - # Append the newly resampled data to the target table - resampled_df.to_sql(target_table_name, conn, if_exists='append', index=True) - logging.debug(f" -> Appended {len(resampled_df)} new candles to '{target_table_name}'.") + records_to_upsert = [] + for index, row in resampled_df.iterrows(): + records_to_upsert.append(( + index.strftime('%Y-%m-%d %H:%M:%S'), + row['open'], row['high'], row['low'], row['close'], + row['volume'], row['number_of_trades'] + )) + + cursor = conn.cursor() + cursor.executemany(f''' + INSERT OR REPLACE INTO "{target_table_name}" (datetime_utc, open, high, low, close, volume, number_of_trades) + VALUES (?, ?, ?, ?, ?, ?, ?) + ''', records_to_upsert) + conn.commit() + + logging.debug(f" -> Upserted {len(resampled_df)} candles into '{target_table_name}'.") if coin not in self.resampling_status: self.resampling_status[coin] = {} total_candles = int(self._get_table_count(conn, target_table_name)) @@ -111,7 +186,6 @@ class Resampler: """Logs a summary of the total candles for each timeframe.""" logging.info("--- Resampling Job Summary ---") timeframe_totals = {} - # Iterate through coins, skipping metadata keys for coin, tfs in self.resampling_status.items(): if not isinstance(tfs, dict): continue for tf_name, tf_data in tfs.items(): @@ -129,9 +203,10 @@ class Resampler: logging.info(f" - {tf_name:<10}: {total:,} candles") def _get_last_timestamp(self, conn, table_name): - """Gets the timestamp of the last entry in a table.""" + """Gets the timestamp of the last entry in a table as a pandas Timestamp.""" try: - return pd.read_sql(f'SELECT MAX(datetime_utc) FROM "{table_name}"', conn).iloc[0, 0] + timestamp_str = pd.read_sql(f'SELECT MAX(datetime_utc) FROM "{table_name}"', conn).iloc[0, 0] + return pd.to_datetime(timestamp_str) if timestamp_str else None except (pd.io.sql.DatabaseError, IndexError): return None @@ -151,7 +226,6 @@ class Resampler: self.resampling_status['job_start_time_utc'] = self.job_start_time.strftime('%Y-%m-%d %H:%M:%S') self.resampling_status['job_stop_time_utc'] = stop_time.strftime('%Y-%m-%d %H:%M:%S') - # Clean up old key if it exists from previous versions self.resampling_status.pop('last_completed_utc', None) try: @@ -167,14 +241,24 @@ def parse_timeframes(tf_strings: list) -> dict: tf_map = {} for tf_str in tf_strings: numeric_part = ''.join(filter(str.isdigit, tf_str)) - unit = ''.join(filter(str.isalpha, tf_str)).lower() + unit = ''.join(filter(str.isalpha, tf_str)) # Keep case for 'M' + key = tf_str code = '' - if unit == 'm': code = f"{numeric_part}min" - elif unit == 'w': code = f"{numeric_part}W" - elif unit in ['h', 'd']: code = f"{numeric_part}{unit}" - else: code = tf_str - tf_map[tf_str] = code + if unit == 'm': + code = f"{numeric_part}min" + elif unit.lower() == 'w': + code = f"{numeric_part}W-MON" + elif unit == 'M': + code = f"{numeric_part}MS" + key = f"{numeric_part}month" + elif unit.lower() in ['h', 'd']: + code = f"{numeric_part}{unit.lower()}" + else: + code = tf_str + logging.warning(f"Unrecognized timeframe unit in '{tf_str}'. 
Using as-is.") + + tf_map[key] = code return tf_map From 75c0cc77cca3dab51ebecc5bf256299953040085 Mon Sep 17 00:00:00 2001 From: DiTus Date: Tue, 21 Oct 2025 23:52:32 +0200 Subject: [PATCH 04/18] save market cap of all coins --- coin_id_map.py | 80 +++++++++++++++++++++++++++++++++++++++++++ market_cap_fetcher.py | 73 +++++++++++++++++++-------------------- 2 files changed, 116 insertions(+), 37 deletions(-) create mode 100644 coin_id_map.py diff --git a/coin_id_map.py b/coin_id_map.py new file mode 100644 index 0000000..3f4608d --- /dev/null +++ b/coin_id_map.py @@ -0,0 +1,80 @@ +import os +import json +import logging +import requests +from hyperliquid.info import Info +from hyperliquid.utils import constants + +from logging_utils import setup_logging + +def update_coin_mapping(): + """ + Fetches all assets from Hyperliquid and all coins from CoinGecko, + then creates and saves a mapping from the Hyperliquid symbol to the + CoinGecko ID. + """ + setup_logging('normal', 'CoinMapUpdater') + logging.info("Starting coin mapping update process...") + + # --- 1. Fetch all assets from Hyperliquid --- + try: + logging.info("Fetching assets from Hyperliquid...") + info = Info(constants.MAINNET_API_URL, skip_ws=True) + # The meta object contains the 'universe' list with asset details + meta, asset_contexts = info.meta_and_asset_ctxs() + + # --- FIX: The asset names are in the 'universe' list inside the meta object --- + # The 'universe' is a list of dictionaries, each with a 'name' + hyperliquid_assets = [asset['name'] for asset in meta['universe']] + + logging.info(f"Found {len(hyperliquid_assets)} assets on Hyperliquid.") + except Exception as e: + logging.error(f"Failed to fetch assets from Hyperliquid: {e}") + return + + # --- 2. Fetch all coins from CoinGecko --- + try: + logging.info("Fetching coin list from CoinGecko...") + response = requests.get("https://api.coingecko.com/api/v3/coins/list") + response.raise_for_status() + coingecko_coins = response.json() + # Create a lookup table: {symbol: id} + coingecko_lookup = {coin['symbol'].upper(): coin['id'] for coin in coingecko_coins} + logging.info(f"Found {len(coingecko_coins)} coins on CoinGecko.") + except requests.exceptions.RequestException as e: + logging.error(f"Failed to fetch coin list from CoinGecko: {e}") + return + + # --- 3. Create the mapping --- + final_mapping = {} + manual_overrides = { + "HYPE": "hyperliquid", + "PUMP": "pump-fun", + "ASTER": "astar", + } + + logging.info("Generating symbol-to-id mapping...") + for asset_symbol in hyperliquid_assets: + # Check for manual overrides first + if asset_symbol in manual_overrides: + final_mapping[asset_symbol] = manual_overrides[asset_symbol] + continue + + # Try to find a direct match in the CoinGecko lookup table + if asset_symbol in coingecko_lookup: + final_mapping[asset_symbol] = coingecko_lookup[asset_symbol] + else: + logging.warning(f"No direct match found for '{asset_symbol}' on CoinGecko. It will be excluded.") + + # --- 4. 
Save the mapping to a file --- + map_file_path = os.path.join("_data", "coin_id_map.json") + try: + with open(map_file_path, 'w', encoding='utf-8') as f: + json.dump(final_mapping, f, indent=4, sort_keys=True) + logging.info(f"Successfully saved new coin mapping with {len(final_mapping)} entries to '{map_file_path}'.") + except IOError as e: + logging.error(f"Failed to write coin mapping file: {e}") + +if __name__ == "__main__": + update_coin_mapping() + diff --git a/market_cap_fetcher.py b/market_cap_fetcher.py index ac25ba6..95877f0 100644 --- a/market_cap_fetcher.py +++ b/market_cap_fetcher.py @@ -8,47 +8,52 @@ import requests import time from datetime import datetime, timezone, timedelta import json +from dotenv import load_dotenv + +load_dotenv() -# Assuming logging_utils.py is in the same directory from logging_utils import setup_logging class MarketCapFetcher: """ Fetches historical daily market cap data from the CoinGecko API and - intelligently updates the SQLite database. It processes individual coins, - aggregates stablecoins, and captures total market cap metrics. + intelligently updates the SQLite database for all coins found in the coin map. """ - COIN_ID_MAP = { - "BTC": "bitcoin", - "ETH": "ethereum", - "SOL": "solana", - "BNB": "binancecoin", - "HYPE": "hyperliquid", - "ASTER": "astar", - "ZEC": "zcash", - "PUMP": "pump-fun", # Correct ID is 'pump-fun' - "SUI": "sui" - } - - STABLECOIN_ID_MAP = { - "USDT": "tether", - "USDC": "usd-coin", - "USDE": "ethena-usde", - "DAI": "dai", - "PYUSD": "paypal-usd" - } - - def __init__(self, log_level: str, coins: list): + def __init__(self, log_level: str): setup_logging(log_level, 'MarketCapFetcher') - self.coins_to_fetch = coins self.db_path = os.path.join("_data", "market_data.db") self.api_base_url = "https://api.coingecko.com/api/v3" self.api_key = os.environ.get("COINGECKO_API_KEY") - if not self.api_key: logging.error("CoinGecko API key not found. Please set the COINGECKO_API_KEY environment variable.") sys.exit(1) + + self.COIN_ID_MAP = self._load_coin_id_map() + if not self.COIN_ID_MAP: + logging.error("Coin ID map is empty. Run 'update_coin_map.py' to generate it.") + sys.exit(1) + + # --- FIX: The list of coins to fetch is now all coins from the map --- + self.coins_to_fetch = list(self.COIN_ID_MAP.keys()) + + self.STABLECOIN_ID_MAP = { + "USDT": "tether", + "USDC": "usd-coin", + "USDE": "ethena-usde", + "DAI": "dai", + "PYUSD": "paypal-usd" + } + + def _load_coin_id_map(self) -> dict: + """Loads the dynamically generated coin-to-id mapping.""" + map_file_path = os.path.join("_data", "coin_id_map.json") + try: + with open(map_file_path, 'r') as f: + return json.load(f) + except (FileNotFoundError, json.JSONDecodeError) as e: + logging.error(f"Could not load '{map_file_path}'. Please run 'update_coin_map.py' first. Error: {e}") + return {} def run(self): """ @@ -58,7 +63,7 @@ class MarketCapFetcher: with sqlite3.connect(self.db_path) as conn: conn.execute("PRAGMA journal_mode=WAL;") - # 1. Process individual coins + # 1. 
Process individual coins from the map for coin_symbol in self.coins_to_fetch: coin_id = self.COIN_ID_MAP.get(coin_symbol.upper()) if not coin_id: @@ -123,7 +128,6 @@ class MarketCapFetcher: table_name = "TOTAL_market_cap_daily" try: - # --- FIX: Use the current date instead of yesterday's --- today_date = datetime.now(timezone.utc).date() cursor = conn.cursor() @@ -131,7 +135,6 @@ class MarketCapFetcher: table_exists = cursor.fetchone() if table_exists: - # Check if we already have a record for today cursor.execute(f"SELECT 1 FROM \"{table_name}\" WHERE date(datetime_utc) = ? LIMIT 1", (today_date.isoformat(),)) if cursor.fetchone(): logging.info(f"Total market cap for {today_date} already exists. Skipping.") @@ -245,7 +248,7 @@ class MarketCapFetcher: try: logging.debug(f"Fetching last {days} days for {coin_id}...") - response = requests.get(url, headers=headers) + response = requests.get(url, headers=headers, params=params) response.raise_for_status() data = response.json() @@ -264,12 +267,7 @@ class MarketCapFetcher: if __name__ == "__main__": parser = argparse.ArgumentParser(description="Fetch historical market cap data from CoinGecko.") - parser.add_argument( - "--coins", - nargs='+', - default=["BTC", "ETH", "SOL", "BNB", "HYPE", "ASTER", "ZEC", "PUMP", "SUI"], - help="List of coin symbols to fetch (e.g., BTC ETH)." - ) + # --- FIX: The --coins argument is no longer needed as the script is now fully automated --- parser.add_argument( "--log-level", default="normal", @@ -278,6 +276,7 @@ if __name__ == "__main__": ) args = parser.parse_args() - fetcher = MarketCapFetcher(log_level=args.log_level, coins=args.coins) + # The 'coins' argument is no longer passed to the constructor + fetcher = MarketCapFetcher(log_level=args.log_level) fetcher.run() From afbb4e497654622ad57c4e350ea6021a82333045 Mon Sep 17 00:00:00 2001 From: DiTus Date: Tue, 21 Oct 2025 23:53:06 +0200 Subject: [PATCH 05/18] wallet info --- wallet_data.py | 664 +++++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 664 insertions(+) create mode 100644 wallet_data.py diff --git a/wallet_data.py b/wallet_data.py new file mode 100644 index 0000000..0d0a5ab --- /dev/null +++ b/wallet_data.py @@ -0,0 +1,664 @@ +#!/usr/bin/env python3 +""" +Hyperliquid Wallet Data Fetcher - Perfect Table Alignment +========================================================== +Complete Python script to pull all available data for a Hyperliquid wallet via API. + +Requirements: + pip install hyperliquid-python-sdk + +Usage: + python hyperliquid_wallet_data.py + +Example: + python hyperliquid_wallet_data.py 0xcd5051944f780a621ee62e39e493c489668acf4d +""" + +import sys +import json +from datetime import datetime, timedelta +from typing import Optional, Dict, Any +from hyperliquid.info import Info +from hyperliquid.utils import constants + + +class HyperliquidWalletAnalyzer: + """ + Comprehensive wallet data analyzer for Hyperliquid exchange. + Fetches all available information about a specific wallet address. + """ + + def __init__(self, wallet_address: str, use_testnet: bool = False): + """ + Initialize the analyzer with a wallet address. + + Args: + wallet_address: Ethereum-style address (0x...) 
use_testnet: If True, use testnet instead of mainnet
        """
        self.wallet_address = wallet_address
        api_url = constants.TESTNET_API_URL if use_testnet else constants.MAINNET_API_URL

        # Initialize Info API (read-only, no private keys needed)
        self.info = Info(api_url, skip_ws=True)
        print(f"Initialized Hyperliquid API: {'Testnet' if use_testnet else 'Mainnet'}")
        print(f"Target wallet: {wallet_address}\n")

    def print_position_details(self, position: Dict[str, Any], index: int):
        """
        Print detailed information about a single position.

        Args:
            position: Position data dictionary
            index: Position number for display
        """
        pos = position.get('position', {})

        # Extract all position details
        coin = pos.get('coin', 'Unknown')
        size = float(pos.get('szi', 0))
        entry_px = float(pos.get('entryPx', 0))
        position_value = float(pos.get('positionValue', 0))
        unrealized_pnl = float(pos.get('unrealizedPnl', 0))
        return_on_equity = float(pos.get('returnOnEquity', 0))

        # Leverage details
        leverage = pos.get('leverage', {})
        leverage_type = leverage.get('type', 'unknown') if isinstance(leverage, dict) else 'cross'
        leverage_value = leverage.get('value', 0) if isinstance(leverage, dict) else 0

        # Margin and liquidation
        margin_used = float(pos.get('marginUsed', 0))
        liquidation_px = pos.get('liquidationPx')
        max_trade_szs = pos.get('maxTradeSzs', [0, 0])

        # Cumulative funding
        cumulative_funding = float(pos.get('cumFunding', {}).get('allTime', 0))

        # Determine if long or short
        side = "LONG šŸ“ˆ" if size > 0 else "SHORT šŸ“‰"
        side_color = "🟢" if size > 0 else "šŸ”“"

        # PnL color
        pnl_symbol = "🟢" if unrealized_pnl >= 0 else "šŸ”“"
        pnl_sign = "+" if unrealized_pnl >= 0 else ""

        # ROE color
        roe_symbol = "🟢" if return_on_equity >= 0 else "šŸ”“"
        roe_sign = "+" if return_on_equity >= 0 else ""

        print(f"\n{'='*80}")
        print(f"POSITION #{index}: {coin} {side} {side_color}")
        print(f"{'='*80}")

        print(f"\nšŸ“Š POSITION DETAILS:")
        print(f" Size: {abs(size):.6f} {coin}")
        print(f" Side: {side}")
        print(f" Entry Price: ${entry_px:,.4f}")
        print(f" Position Value: ${abs(position_value):,.2f}")

        print(f"\nšŸ’° PROFITABILITY:")
        print(f" Unrealized PnL: {pnl_symbol} {pnl_sign}${unrealized_pnl:,.2f}")
        print(f" Return on Equity: {roe_symbol} {roe_sign}{return_on_equity:.2%}")
        print(f" Cumulative Funding: ${cumulative_funding:,.4f}")

        print(f"\nāš™ļø LEVERAGE & MARGIN:")
        print(f" Leverage Type: {leverage_type.upper()}")
        print(f" Leverage: {leverage_value}x")
        print(f" Margin Used: ${margin_used:,.2f}")

        print(f"\nāš ļø RISK MANAGEMENT:")
        if liquidation_px:
            liquidation_px_float = float(liquidation_px) if liquidation_px else 0
            print(f" Liquidation Price: ${liquidation_px_float:,.4f}")

            # Calculate distance to liquidation
            if entry_px > 0 and liquidation_px_float > 0:
                if size > 0:  # Long position
                    distance = ((entry_px - liquidation_px_float) / entry_px) * 100
                else:  # Short position
                    distance = ((liquidation_px_float - entry_px) / entry_px) * 100

                distance_symbol = "🟢" if abs(distance) > 20 else "🟔" if abs(distance) > 10 else "šŸ”“"
                print(f" Distance to Liq: {distance_symbol} {abs(distance):.2f}%")
        else:
            print(f" Liquidation Price: N/A (Cross margin)")

        if max_trade_szs and len(max_trade_szs) == 2:
            print(f" Max Long Trade: {max_trade_szs[0]}")
            print(f" Max Short Trade: {max_trade_szs[1]}")

        print(f"\n{'='*80}")

    def get_user_state(self) -> Dict[str, Any]:
        """
        Get complete user state including positions and margin summary.

        Returns:
            Dict containing:
            - assetPositions: List of open perpetual positions
            - marginSummary: Account value, margin used, withdrawable
            - crossMarginSummary: Cross margin details
            - withdrawable: Available balance to withdraw
        """
        print("šŸ“Š Fetching User State (Perpetuals)...")
        try:
            data = self.info.user_state(self.wallet_address)

            if data:
                margin_summary = data.get('marginSummary', {})
                positions = data.get('assetPositions', [])

                account_value = float(margin_summary.get('accountValue', 0))
                total_margin_used = float(margin_summary.get('totalMarginUsed', 0))
                total_ntl_pos = float(margin_summary.get('totalNtlPos', 0))
                total_raw_usd = float(margin_summary.get('totalRawUsd', 0))
                withdrawable = float(data.get('withdrawable', 0))

                print(f" āœ“ Account Value: ${account_value:,.2f}")
                print(f" āœ“ Total Margin Used: ${total_margin_used:,.2f}")
                print(f" āœ“ Total Position Value: ${total_ntl_pos:,.2f}")
                print(f" āœ“ Withdrawable: ${withdrawable:,.2f}")
                print(f" āœ“ Open Positions: {len(positions)}")

                # Calculate margin utilization
                if account_value > 0:
                    margin_util = (total_margin_used / account_value) * 100
                    util_symbol = "🟢" if margin_util < 50 else "🟔" if margin_util < 75 else "šŸ”“"
                    print(f" āœ“ Margin Utilization: {util_symbol} {margin_util:.2f}%")

                # Print detailed information for each position
                if positions:
                    print(f"\n{'='*80}")
                    print(f"DETAILED POSITION BREAKDOWN ({len(positions)} positions)")
                    print(f"{'='*80}")

                    for idx, position in enumerate(positions, 1):
                        self.print_position_details(position, idx)

                    # Summary table with perfect alignment
                    self.print_positions_summary_table(positions)

            else:
                print(" ⚠ No perpetual positions found")

            return data
        except Exception as e:
            print(f" āœ— Error: {e}")
            return {}

    def print_positions_summary_table(self, positions: list):
        """
        Print a summary table of all positions with properly aligned vertical separators.
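
        Each cell is padded to a fixed width with format specs such as '{value:>16}',
        and emojis are kept out of the data cells, so the '|' separators stay aligned
        in a monospace terminal.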

        Args:
            positions: List of position dictionaries
        """
        print(f"\n{'='*130}")
        print(f"POSITIONS SUMMARY TABLE")
        print(f"{'='*130}")

        # Header with vertical separators
        print("| Asset    | Side      | Size             | Entry Price      | Position Value     | Unrealized PnL     | ROE        | Leverage   |")
        print("|----------|-----------|------------------|------------------|--------------------|--------------------|------------|------------|")

        total_position_value = 0
        total_pnl = 0

        for position in positions:
            pos = position.get('position', {})

            coin = pos.get('coin', 'Unknown')
            size = float(pos.get('szi', 0))
            entry_px = float(pos.get('entryPx', 0))
            position_value = float(pos.get('positionValue', 0))
            unrealized_pnl = float(pos.get('unrealizedPnl', 0))
            return_on_equity = float(pos.get('returnOnEquity', 0))

            # Get leverage
            leverage = pos.get('leverage', {})
            leverage_value = leverage.get('value', 0) if isinstance(leverage, dict) else 0
            leverage_type = leverage.get('type', 'cross') if isinstance(leverage, dict) else 'cross'

            side_text = "LONG" if size > 0 else "SHORT"
            side_emoji = "šŸ“ˆ" if size > 0 else "šŸ“‰"

            # Add color indicators (using text instead of emojis for alignment)
            pnl_sign = "+" if unrealized_pnl >= 0 else ""

            # Accumulate totals
            total_position_value += abs(position_value)
            total_pnl += unrealized_pnl

            # Format numbers with proper width - no emojis in the data
            size_str = f"{abs(size):,.4f}"
            entry_str = f"${entry_px:,.2f}"
            value_str = f"${abs(position_value):,.2f}"
            pnl_str = f"{pnl_sign}${unrealized_pnl:,.2f}"
            roe_str = f"{return_on_equity:+.2%}"
            lev_str = f"{leverage_value}x {leverage_type[:4]}"

            # Use fixed width with ljust/rjust for proper alignment
            row = (f"| {coin[:8]:<8} "
                   f"| {side_text:<5} {side_emoji} "
                   f"| {size_str:>16} "
                   f"| {entry_str:>16} "
                   f"| {value_str:>18} "
                   f"| {pnl_str:>18} "
                   f"| {roe_str:>10} "
                   f"| {lev_str:<10} |")
            print(row)

        # Separator before totals
        print("|==========|===========|==================|==================|====================|====================|============|============|")

        # Total row
        total_value_str = f"${total_position_value:,.2f}"
        total_pnl_sign = "+" if total_pnl >= 0 else ""
        total_pnl_str = f"{total_pnl_sign}${total_pnl:,.2f}"

        total_row = (f"| {'TOTAL':<8} "
                     f"| {'':<9} "
                     f"| {'':<16} "
                     f"| {'':<16} "
                     f"| {total_value_str:>18} "
                     f"| {total_pnl_str:>18} "
                     f"| {'':<10} "
                     f"| {'':<10} |")
        print(total_row)
        print(f"{'='*130}\n")

    def get_spot_state(self) -> Dict[str, Any]:
        """
        Get spot trading state including token balances.

        Returns:
            Dict containing:
            - balances: List of spot token holdings
        """
        print("\nšŸ’° Fetching Spot State...")
        try:
            data = self.info.spot_user_state(self.wallet_address)

            if data and data.get('balances'):
                print(f" āœ“ Spot Holdings: {len(data['balances'])} tokens")
                for balance in data['balances'][:5]:  # Show first 5
                    print(f" - {balance.get('coin', 'Unknown')}: {balance.get('total', 0)}")
            else:
                print(" ⚠ No spot holdings found")

            return data
        except Exception as e:
            print(f" āœ— Error: {e}")
            return {}

    def get_open_orders(self) -> list:
        """
        Get all open orders for the user.

        Returns:
            List of open orders with details (price, size, side, etc.)
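
        Example (illustrative):
            analyzer = HyperliquidWalletAnalyzer("0x...")
            for order in analyzer.get_open_orders():
                print(order.get('coin'), order.get('side'),
                      order.get('sz'), '@', order.get('limitPx'))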
+ """ + print("\nšŸ“‹ Fetching Open Orders...") + try: + data = self.info.open_orders(self.wallet_address) + + if data: + print(f" āœ“ Open Orders: {len(data)}") + for order in data[:3]: # Show first 3 + coin = order.get('coin', 'Unknown') + side = order.get('side', 'Unknown') + size = order.get('sz', 0) + price = order.get('limitPx', 0) + print(f" - {coin} {side}: {size} @ ${price}") + else: + print(" ⚠ No open orders") + + return data + except Exception as e: + print(f" āœ— Error: {e}") + return [] + + def get_user_fills(self, limit: int = 100) -> list: + """ + Get recent trade fills (executions). + + Args: + limit: Maximum number of fills to retrieve (max 2000) + + Returns: + List of fills with execution details, PnL, timestamps + """ + print(f"\nšŸ“ˆ Fetching Recent Fills (last {limit})...") + try: + data = self.info.user_fills(self.wallet_address) + + if data: + fills = data[:limit] + print(f" āœ“ Total Fills Retrieved: {len(fills)}") + + # Show summary stats + total_pnl = sum(float(f.get('closedPnl', 0)) for f in fills if f.get('closedPnl')) + print(f" āœ“ Total Closed PnL: ${total_pnl:.2f}") + + # Show most recent + if fills: + recent = fills[0] + print(f" āœ“ Most Recent: {recent.get('coin')} {recent.get('side')} {recent.get('sz')} @ ${recent.get('px')}") + else: + print(" ⚠ No fills found") + + return data[:limit] if data else [] + except Exception as e: + print(f" āœ— Error: {e}") + return [] + + def get_user_fills_by_time(self, start_time: Optional[int] = None, + end_time: Optional[int] = None) -> list: + """ + Get fills within a specific time range. + + Args: + start_time: Start timestamp in milliseconds (default: 7 days ago) + end_time: End timestamp in milliseconds (default: now) + + Returns: + List of fills within the time range + """ + if not start_time: + start_time = int((datetime.now() - timedelta(days=7)).timestamp() * 1000) + if not end_time: + end_time = int(datetime.now().timestamp() * 1000) + + print(f"\nšŸ“… Fetching Fills by Time Range...") + print(f" From: {datetime.fromtimestamp(start_time/1000)}") + print(f" To: {datetime.fromtimestamp(end_time/1000)}") + + try: + data = self.info.user_fills_by_time(self.wallet_address, start_time, end_time) + + if data: + print(f" āœ“ Fills in Range: {len(data)}") + else: + print(" ⚠ No fills in this time range") + + return data + except Exception as e: + print(f" āœ— Error: {e}") + return [] + + def get_user_fees(self) -> Dict[str, Any]: + """ + Get user's fee schedule and trading volume. + + Returns: + Dict containing: + - feeSchedule: Fee rates by tier + - userCrossRate: User's current cross trading fee rate + - userAddRate: User's maker fee rate + - userWithdrawRate: Withdrawal fee rate + - dailyUserVlm: Daily trading volume + """ + print("\nšŸ’³ Fetching Fee Information...") + try: + data = self.info.user_fees(self.wallet_address) + + if data: + print(f" āœ“ Maker Fee: {data.get('userAddRate', 0)}%") + print(f" āœ“ Taker Fee: {data.get('userCrossRate', 0)}%") + print(f" āœ“ Daily Volume: ${data.get('dailyUserVlm', [0])[0] if data.get('dailyUserVlm') else 0}") + + return data + except Exception as e: + print(f" āœ— Error: {e}") + return {} + + def get_user_rate_limit(self) -> Dict[str, Any]: + """ + Get API rate limit information. 
+ + Returns: + Dict containing: + - cumVlm: Cumulative trading volume + - nRequestsUsed: Number of requests used + - nRequestsCap: Request capacity + """ + print("\nā±ļø Fetching Rate Limit Info...") + try: + data = self.info.user_rate_limit(self.wallet_address) + + if data: + used = data.get('nRequestsUsed', 0) + cap = data.get('nRequestsCap', 0) + print(f" āœ“ API Requests: {used}/{cap}") + print(f" āœ“ Cumulative Volume: ${data.get('cumVlm', 0)}") + + return data + except Exception as e: + print(f" āœ— Error: {e}") + return {} + + def get_funding_history(self, coin: str, days: int = 7) -> list: + """ + Get funding rate history for a specific coin. + + Args: + coin: Asset symbol (e.g., 'BTC', 'ETH') + days: Number of days of history (default: 7) + + Returns: + List of funding rate entries + """ + end_time = int(datetime.now().timestamp() * 1000) + start_time = int((datetime.now() - timedelta(days=days)).timestamp() * 1000) + + print(f"\nšŸ“Š Fetching Funding History for {coin}...") + try: + data = self.info.funding_history(coin, start_time, end_time) + + if data: + print(f" āœ“ Funding Entries: {len(data)}") + if data: + latest = data[-1] + print(f" āœ“ Latest Rate: {latest.get('fundingRate', 0)}") + + return data + except Exception as e: + print(f" āœ— Error: {e}") + return [] + + def get_user_funding_history(self, days: int = 7) -> list: + """ + Get user's funding payments history. + + Args: + days: Number of days of history (default: 7) + + Returns: + List of funding payments + """ + end_time = int(datetime.now().timestamp() * 1000) + start_time = int((datetime.now() - timedelta(days=days)).timestamp() * 1000) + + print(f"\nšŸ’ø Fetching User Funding Payments (last {days} days)...") + try: + data = self.info.user_funding_history(self.wallet_address, start_time, end_time) + + if data: + print(f" āœ“ Funding Payments: {len(data)}") + total_funding = sum(float(f.get('usdc', 0)) for f in data) + print(f" āœ“ Total Funding P&L: ${total_funding:.2f}") + else: + print(" ⚠ No funding payments found") + + return data + except Exception as e: + print(f" āœ— Error: {e}") + return [] + + def get_user_non_funding_ledger_updates(self, days: int = 7) -> list: + """ + Get non-funding ledger updates (deposits, withdrawals, liquidations). + + Args: + days: Number of days of history (default: 7) + + Returns: + List of ledger updates + """ + end_time = int(datetime.now().timestamp() * 1000) + start_time = int((datetime.now() - timedelta(days=days)).timestamp() * 1000) + + print(f"\nšŸ“’ Fetching Ledger Updates (last {days} days)...") + try: + data = self.info.user_non_funding_ledger_updates(self.wallet_address, start_time, end_time) + + if data: + print(f" āœ“ Ledger Updates: {len(data)}") + # Categorize updates + deposits = [u for u in data if 'deposit' in str(u.get('delta', {})).lower()] + withdrawals = [u for u in data if 'withdraw' in str(u.get('delta', {})).lower()] + print(f" āœ“ Deposits: {len(deposits)}, Withdrawals: {len(withdrawals)}") + else: + print(" ⚠ No ledger updates found") + + return data + except Exception as e: + print(f" āœ— Error: {e}") + return [] + + def get_referral_state(self) -> Dict[str, Any]: + """ + Get referral program state for the user. 
+ + Returns: + Dict with referral status and earnings + """ + print("\nšŸŽ Fetching Referral State...") + try: + data = self.info.query_referral_state(self.wallet_address) + + if data: + print(f" āœ“ Referral Code: {data.get('referralCode', 'N/A')}") + print(f" āœ“ Referees: {len(data.get('referees', []))}") + + return data + except Exception as e: + print(f" āœ— Error: {e}") + return {} + + def get_sub_accounts(self) -> list: + """ + Get list of sub-accounts for the user. + + Returns: + List of sub-account addresses + """ + print("\nšŸ‘„ Fetching Sub-Accounts...") + try: + data = self.info.query_sub_accounts(self.wallet_address) + + if data: + print(f" āœ“ Sub-Accounts: {len(data)}") + else: + print(" ⚠ No sub-accounts found") + + return data + except Exception as e: + print(f" āœ— Error: {e}") + return [] + + def fetch_all_data(self, save_to_file: bool = True) -> Dict[str, Any]: + """ + Fetch all available data for the wallet. + + Args: + save_to_file: If True, save results to JSON file + + Returns: + Dict containing all fetched data + """ + print("=" * 80) + print("HYPERLIQUID WALLET DATA FETCHER") + print("=" * 80) + + all_data = { + 'wallet_address': self.wallet_address, + 'timestamp': datetime.now().isoformat(), + 'data': {} + } + + # Fetch all data sections + all_data['data']['user_state'] = self.get_user_state() + all_data['data']['spot_state'] = self.get_spot_state() + all_data['data']['open_orders'] = self.get_open_orders() + all_data['data']['recent_fills'] = self.get_user_fills(limit=50) + all_data['data']['fills_last_7_days'] = self.get_user_fills_by_time() + all_data['data']['user_fees'] = self.get_user_fees() + all_data['data']['rate_limit'] = self.get_user_rate_limit() + all_data['data']['funding_payments'] = self.get_user_funding_history(days=7) + all_data['data']['ledger_updates'] = self.get_user_non_funding_ledger_updates(days=7) + all_data['data']['referral_state'] = self.get_referral_state() + all_data['data']['sub_accounts'] = self.get_sub_accounts() + + # Optional: Fetch funding history for positions + user_state = all_data['data']['user_state'] + if user_state and user_state.get('assetPositions'): + all_data['data']['funding_history'] = {} + for position in user_state['assetPositions'][:3]: # First 3 positions + coin = position.get('position', {}).get('coin') + if coin: + all_data['data']['funding_history'][coin] = self.get_funding_history(coin, days=7) + + print("\n" + "=" * 80) + print("DATA COLLECTION COMPLETE") + print("=" * 80) + + # Save to file + if save_to_file: + filename = f"hyperliquid_wallet_data_{self.wallet_address[:10]}_{datetime.now().strftime('%Y%m%d_%H%M%S')}.json" + with open(filename, 'w') as f: + json.dump(all_data, f, indent=2, default=str) + print(f"\nšŸ’¾ Data saved to: {filename}") + + return all_data + + +def main(): + """Main execution function.""" + if len(sys.argv) < 2: + print("Usage: python hyperliquid_wallet_data.py [--testnet]") + print("\nExample:") + print(" python hyperliquid_wallet_data.py 0xcd5051944f780a621ee62e39e493c489668acf4d") + sys.exit(1) + + wallet_address = sys.argv[1] + use_testnet = '--testnet' in sys.argv + + # Validate wallet address format + if not wallet_address.startswith('0x') or len(wallet_address) != 42: + print("āŒ Error: Invalid wallet address format") + print(" Address must be in format: 0x followed by 40 hexadecimal characters") + sys.exit(1) + + try: + analyzer = HyperliquidWalletAnalyzer(wallet_address, use_testnet=use_testnet) + data = analyzer.fetch_all_data(save_to_file=True) + + print("\nāœ… All 
data fetched successfully!")
+        print(f"\nšŸ“Š Summary:")
+        print(f"   - Account Value: ${data['data']['user_state'].get('marginSummary', {}).get('accountValue', 0)}")
+        print(f"   - Open Positions: {len(data['data']['user_state'].get('assetPositions', []))}")
+        print(f"   - Spot Holdings: {len(data['data']['spot_state'].get('balances', []))}")
+        print(f"   - Open Orders: {len(data['data']['open_orders'])}")
+        print(f"   - Recent Fills: {len(data['data']['recent_fills'])}")
+
+    except Exception as e:
+        print(f"\nāŒ Fatal Error: {e}")
+        import traceback
+        traceback.print_exc()
+        sys.exit(1)
+
+
+if __name__ == "__main__":
+    main()
\ No newline at end of file

From 58056012181bc5a35840a2c870f6877aaa3566d6 Mon Sep 17 00:00:00 2001
From: DiTus
Date: Wed, 22 Oct 2025 22:22:13 +0200
Subject: [PATCH 06/18] timestamp_ms column added to all tables as primary key

---
 market_cap_fetcher.py | 154 ++++++++++++++++++++++++++----------------
 resampler.py          |  50 ++++++++------
 2 files changed, 125 insertions(+), 79 deletions(-)

diff --git a/market_cap_fetcher.py b/market_cap_fetcher.py
index 95877f0..2557828 100644
--- a/market_cap_fetcher.py
+++ b/market_cap_fetcher.py
@@ -17,7 +17,7 @@ from logging_utils import setup_logging
 class MarketCapFetcher:
     """
     Fetches historical daily market cap data from the CoinGecko API and
-    intelligently updates the SQLite database for all coins found in the coin map.
+    intelligently upserts it into the SQLite database for all coins.
     """
 
     def __init__(self, log_level: str):
@@ -34,16 +34,50 @@ class MarketCapFetcher:
             logging.error("Coin ID map is empty. Run 'update_coin_map.py' to generate it.")
             sys.exit(1)
 
-        # --- FIX: The list of coins to fetch is now all coins from the map ---
         self.coins_to_fetch = list(self.COIN_ID_MAP.keys())
 
         self.STABLECOIN_ID_MAP = {
-            "USDT": "tether",
-            "USDC": "usd-coin",
-            "USDE": "ethena-usde",
-            "DAI": "dai",
-            "PYUSD": "paypal-usd"
+            "USDT": "tether", "USDC": "usd-coin", "USDE": "ethena-usde",
+            "DAI": "dai", "PYUSD": "paypal-usd"
         }
+
+        # --- ADDED: Ensure all tables have the correct schema ---
+        self._ensure_tables_exist()
+
+    def _ensure_tables_exist(self):
+        """Ensures all market cap tables exist with timestamp_ms as PRIMARY KEY."""
+        all_tables_to_check = [f"{coin}_market_cap" for coin in self.coins_to_fetch]
+        all_tables_to_check.extend(["STABLECOINS_market_cap", "TOTAL_market_cap_daily"])
+
+        with sqlite3.connect(self.db_path) as conn:
+            for table_name in all_tables_to_check:
+                cursor = conn.cursor()
+                cursor.execute(f"PRAGMA table_info('{table_name}')")
+                columns = cursor.fetchall()
+                if columns:
+                    pk_found = any(col[1] == 'timestamp_ms' and col[5] == 1 for col in columns)
+                    if not pk_found:
+                        logging.warning(f"Schema for table '{table_name}' is incorrect. Dropping and recreating table.")
+                        try:
+                            conn.execute(f'DROP TABLE "{table_name}"')
+                            self._create_market_cap_table(conn, table_name)
+                            logging.info(f"Successfully recreated schema for '{table_name}'.")
+                        except Exception as e:
+                            logging.error(f"FATAL: Failed to recreate table '{table_name}': {e}. 
Please delete 'market_data.db' and restart.") + sys.exit(1) + else: + self._create_market_cap_table(conn, table_name) + logging.info("All market cap table schemas verified.") + + def _create_market_cap_table(self, conn, table_name): + """Creates a new market cap table with the correct schema.""" + conn.execute(f''' + CREATE TABLE IF NOT EXISTS "{table_name}" ( + datetime_utc TEXT, + timestamp_ms INTEGER PRIMARY KEY, + market_cap REAL + ) + ''') def _load_coin_id_map(self) -> dict: """Loads the dynamically generated coin-to-id mapping.""" @@ -55,6 +89,27 @@ class MarketCapFetcher: logging.error(f"Could not load '{map_file_path}'. Please run 'update_coin_map.py' first. Error: {e}") return {} + def _upsert_market_cap_data(self, conn, table_name: str, df: pd.DataFrame): + """Upserts a DataFrame of market cap data into the specified table.""" + if df.empty: + return + + records_to_upsert = [] + for index, row in df.iterrows(): + records_to_upsert.append(( + row['datetime_utc'].strftime('%Y-%m-%d %H:%M:%S'), + row['timestamp_ms'], + row['market_cap'] + )) + + cursor = conn.cursor() + cursor.executemany(f''' + INSERT OR REPLACE INTO "{table_name}" (datetime_utc, timestamp_ms, market_cap) + VALUES (?, ?, ?) + ''', records_to_upsert) + conn.commit() + logging.info(f"Successfully upserted {len(records_to_upsert)} records into '{table_name}'.") + def run(self): """ Main execution function to process all configured coins and update the database. @@ -63,7 +118,6 @@ class MarketCapFetcher: with sqlite3.connect(self.db_path) as conn: conn.execute("PRAGMA journal_mode=WAL;") - # 1. Process individual coins from the map for coin_symbol in self.coins_to_fetch: coin_id = self.COIN_ID_MAP.get(coin_symbol.upper()) if not coin_id: @@ -76,30 +130,21 @@ class MarketCapFetcher: logging.error(f"An unexpected error occurred while processing {coin_symbol}: {e}") time.sleep(2) - # 2. Process and aggregate stablecoins self._update_stablecoin_aggregate(conn) - - # 3. Process total market cap metrics self._update_total_market_cap(conn) - - # 4. Save a summary of the latest data self._save_summary(conn) logging.info("--- Market cap fetch process complete ---") def _save_summary(self, conn): - """ - Queries the last record from each market cap table and saves a summary to a JSON file. - """ + # ... (This function is unchanged) logging.info("--- Generating Market Cap Summary ---") summary_data = {} summary_file_path = os.path.join("_data", "market_cap_data.json") - try: cursor = conn.cursor() cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND (name LIKE '%_market_cap' OR name LIKE 'TOTAL_%');") tables = [row[0] for row in cursor.fetchall()] - for table_name in tables: try: df_last = pd.read_sql(f'SELECT * FROM "{table_name}" ORDER BY datetime_utc DESC LIMIT 1', conn) @@ -107,38 +152,24 @@ class MarketCapFetcher: summary_data[table_name] = df_last.to_dict('records')[0] except Exception as e: logging.error(f"Could not read last record from table '{table_name}': {e}") - if summary_data: summary_data['summary_last_updated_utc'] = datetime.now(timezone.utc).isoformat() - with open(summary_file_path, 'w', encoding='utf-8') as f: json.dump(summary_data, f, indent=4) logging.info(f"Successfully saved market cap summary to '{summary_file_path}'") else: logging.warning("No data found to create a summary.") - except Exception as e: logging.error(f"Failed to generate summary: {e}") def _update_total_market_cap(self, conn): - """ - Fetches the current total market cap and saves it for the current date. 
- """ + """Fetches the current total market cap and upserts it for the current date.""" logging.info("--- Processing Total Market Cap ---") table_name = "TOTAL_market_cap_daily" - try: today_date = datetime.now(timezone.utc).date() - - cursor = conn.cursor() - cursor.execute(f"SELECT name FROM sqlite_master WHERE type='table' AND name='{table_name}';") - table_exists = cursor.fetchone() - - if table_exists: - cursor.execute(f"SELECT 1 FROM \"{table_name}\" WHERE date(datetime_utc) = ? LIMIT 1", (today_date.isoformat(),)) - if cursor.fetchone(): - logging.info(f"Total market cap for {today_date} already exists. Skipping.") - return + today_dt = pd.to_datetime(today_date) + today_ts = int(today_dt.timestamp() * 1000) logging.info("Fetching current global market data...") url = f"{self.api_base_url}/global" @@ -150,10 +181,11 @@ class MarketCapFetcher: if total_mc: df_total = pd.DataFrame([{ - 'datetime_utc': pd.to_datetime(today_date), + 'datetime_utc': today_dt, + 'timestamp_ms': today_ts, 'market_cap': total_mc }]) - df_total.to_sql(table_name, conn, if_exists='append', index=False) + self._upsert_market_cap_data(conn, table_name, df_total) logging.info(f"Saved total market cap for {today_date}: ${total_mc:,.2f}") except requests.exceptions.RequestException as e: @@ -161,7 +193,6 @@ class MarketCapFetcher: except Exception as e: logging.error(f"An error occurred while updating total market cap: {e}") - def _update_stablecoin_aggregate(self, conn): """Fetches data for all stablecoins and saves the aggregated market cap.""" logging.info("--- Processing aggregated stablecoin market cap ---") @@ -171,7 +202,6 @@ class MarketCapFetcher: logging.info(f"Fetching historical data for stablecoin: {symbol}...") df = self._fetch_historical_data(coin_id, days=365) if not df.empty: - df['coin'] = symbol all_stablecoin_df = pd.concat([all_stablecoin_df, df]) time.sleep(2) @@ -179,31 +209,30 @@ class MarketCapFetcher: logging.warning("No data fetched for any stablecoins. 
Cannot create aggregate.") return - aggregated_df = all_stablecoin_df.groupby(all_stablecoin_df['datetime_utc'].dt.date)['market_cap'].sum().reset_index() - aggregated_df['datetime_utc'] = pd.to_datetime(aggregated_df['datetime_utc']) + aggregated_df = all_stablecoin_df.groupby('timestamp_ms').agg( + datetime_utc=('datetime_utc', 'first'), + market_cap=('market_cap', 'sum') + ).reset_index() table_name = "STABLECOINS_market_cap" - last_date_in_db = self._get_last_date_from_db(table_name, conn) + last_date_in_db = self._get_last_date_from_db(table_name, conn, is_timestamp_ms=True) if last_date_in_db: - aggregated_df = aggregated_df[aggregated_df['datetime_utc'] > last_date_in_db] + aggregated_df = aggregated_df[aggregated_df['timestamp_ms'] > last_date_in_db] if not aggregated_df.empty: - aggregated_df.to_sql(table_name, conn, if_exists='append', index=False) - logging.info(f"Successfully saved {len(aggregated_df)} daily records to '{table_name}'.") + self._upsert_market_cap_data(conn, table_name, aggregated_df) else: logging.info("Aggregated stablecoin data is already up-to-date.") - def _update_market_cap_for_coin(self, coin_id: str, coin_symbol: str, conn): """Fetches and appends new market cap data for a single coin.""" table_name = f"{coin_symbol}_market_cap" - - last_date_in_db = self._get_last_date_from_db(table_name, conn) + last_date_in_db = self._get_last_date_from_db(table_name, conn, is_timestamp_ms=True) days_to_fetch = 365 if last_date_in_db: - delta_days = (datetime.now() - last_date_in_db).days + delta_days = (datetime.now(timezone.utc) - datetime.fromtimestamp(last_date_in_db/1000, tz=timezone.utc)).days if delta_days <= 0: logging.info(f"Market cap data for '{coin_symbol}' is already up-to-date.") return @@ -218,24 +247,30 @@ class MarketCapFetcher: return if last_date_in_db: - df = df[df['datetime_utc'] > last_date_in_db] + df = df[df['timestamp_ms'] > last_date_in_db] if not df.empty: - df.to_sql(table_name, conn, if_exists='append', index=False) - logging.info(f"Successfully saved {len(df)} new daily market cap records for {coin_symbol}.") + self._upsert_market_cap_data(conn, table_name, df) else: logging.info(f"Data was fetched, but no new records needed saving for '{coin_symbol}'.") - def _get_last_date_from_db(self, table_name: str, conn) -> pd.Timestamp: - """Gets the most recent date from a market cap table as a pandas Timestamp.""" + def _get_last_date_from_db(self, table_name: str, conn, is_timestamp_ms: bool = False): + """Gets the most recent date or timestamp from a market cap table.""" try: cursor = conn.cursor() cursor.execute(f"SELECT name FROM sqlite_master WHERE type='table' AND name='{table_name}';") if not cursor.fetchone(): return None + + col_to_query = "timestamp_ms" if is_timestamp_ms else "datetime_utc" + last_val = pd.read_sql(f'SELECT MAX({col_to_query}) FROM "{table_name}"', conn).iloc[0, 0] + + if pd.isna(last_val): + return None + if is_timestamp_ms: + return int(last_val) + return pd.to_datetime(last_val) - last_date_str = pd.read_sql(f'SELECT MAX(datetime_utc) FROM "{table_name}"', conn).iloc[0, 0] - return pd.to_datetime(last_date_str) if last_date_str else None except Exception as e: logging.error(f"Could not read last date from table '{table_name}': {e}") return None @@ -256,9 +291,10 @@ class MarketCapFetcher: if not market_caps: return pd.DataFrame() df = pd.DataFrame(market_caps, columns=['timestamp_ms', 'market_cap']) + # --- FIX: Convert to datetime object, but do not format as string --- df['datetime_utc'] = 
pd.to_datetime(df['timestamp_ms'], unit='ms') - df.drop_duplicates(subset=['datetime_utc'], keep='last', inplace=True) - return df[['datetime_utc', 'market_cap']] + df.drop_duplicates(subset=['timestamp_ms'], keep='last', inplace=True) + return df[['datetime_utc', 'timestamp_ms', 'market_cap']] except requests.exceptions.RequestException as e: logging.error(f"API request failed for {coin_id}: {e}.") @@ -267,7 +303,6 @@ class MarketCapFetcher: if __name__ == "__main__": parser = argparse.ArgumentParser(description="Fetch historical market cap data from CoinGecko.") - # --- FIX: The --coins argument is no longer needed as the script is now fully automated --- parser.add_argument( "--log-level", default="normal", @@ -276,7 +311,6 @@ if __name__ == "__main__": ) args = parser.parse_args() - # The 'coins' argument is no longer passed to the constructor fetcher = MarketCapFetcher(log_level=args.log_level) fetcher.run() diff --git a/resampler.py b/resampler.py index ea489b9..ffbeb59 100644 --- a/resampler.py +++ b/resampler.py @@ -37,7 +37,7 @@ class Resampler: def _ensure_tables_exist(self): """ - Ensures all resampled tables exist with a PRIMARY KEY on datetime_utc. + Ensures all resampled tables exist with a PRIMARY KEY on timestamp_ms. Attempts to migrate existing tables if the schema is incorrect. """ with sqlite3.connect(self.db_path) as conn: @@ -48,13 +48,22 @@ class Resampler: cursor.execute(f"PRAGMA table_info('{table_name}')") columns = cursor.fetchall() if columns: - pk_found = any(col[1] == 'datetime_utc' and col[5] == 1 for col in columns) + # --- FIX: Check for the correct PRIMARY KEY on timestamp_ms --- + pk_found = any(col[1] == 'timestamp_ms' and col[5] == 1 for col in columns) if not pk_found: logging.warning(f"Schema migration needed for table '{table_name}'.") try: conn.execute(f'ALTER TABLE "{table_name}" RENAME TO "{table_name}_old"') self._create_resampled_table(conn, table_name) - conn.execute(f'INSERT OR IGNORE INTO "{table_name}" SELECT * FROM "{table_name}_old"') + # Copy data, ensuring to create the timestamp_ms + logging.info(f" -> Migrating data for '{table_name}'...") + old_df = pd.read_sql(f'SELECT * FROM "{table_name}_old"', conn, parse_dates=['datetime_utc']) + if not old_df.empty: + old_df['timestamp_ms'] = (old_df['datetime_utc'].astype('int64') // 10**6) + # Keep only unique timestamps, preserving the last entry + old_df.drop_duplicates(subset=['timestamp_ms'], keep='last', inplace=True) + old_df.to_sql(table_name, conn, if_exists='append', index=False) + logging.info(f" -> Data migration complete.") conn.execute(f'DROP TABLE "{table_name}_old"') conn.commit() logging.info(f"Successfully migrated schema for '{table_name}'.") @@ -67,9 +76,11 @@ class Resampler: def _create_resampled_table(self, conn, table_name): """Creates a new resampled table with the correct schema.""" + # --- FIX: Set PRIMARY KEY on timestamp_ms for performance and uniqueness --- conn.execute(f''' CREATE TABLE "{table_name}" ( - datetime_utc TEXT PRIMARY KEY, + datetime_utc TEXT, + timestamp_ms INTEGER PRIMARY KEY, open REAL, high REAL, low REAL, @@ -123,22 +134,21 @@ class Resampler: source_table_name = f"{coin}_1m" logging.debug(f" Updating {tf_name} table...") - last_timestamp = self._get_last_timestamp(conn, target_table_name) + last_timestamp_ms = self._get_last_timestamp(conn, target_table_name) query = f'SELECT * FROM "{source_table_name}"' params = () - if last_timestamp: - query += ' WHERE datetime_utc >= ?' + if last_timestamp_ms: + query += ' WHERE timestamp_ms >= ?' 
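+            # Worked example (illustrative): pd.to_timedelta('15m').total_seconds()
+            # is 900.0, so the lookback below becomes 900 * 1000 = 900,000 ms
+            # subtracted from the last stored candle's timestamp.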
+ # Go back one interval to rebuild the last (potentially partial) candle try: - # --- FIX: Try the fast method first --- - interval_delta = pd.to_timedelta(tf_code) - query_start_date = last_timestamp - interval_delta + interval_delta_ms = pd.to_timedelta(tf_code).total_seconds() * 1000 except ValueError: - # --- FIX: Fall back to the safe method for special timeframes --- - logging.debug(f"Cannot create timedelta for '{tf_code}'. Using safe 32-day lookback.") - query_start_date = last_timestamp - timedelta(days=32) + # Fall back to a safe 32-day lookback for special timeframes + interval_delta_ms = timedelta(days=32).total_seconds() * 1000 - params = (query_start_date.strftime('%Y-%m-%d %H:%M:%S'),) + query_start_ms = last_timestamp_ms - interval_delta_ms + params = (query_start_ms,) df_1m = pd.read_sql(query, conn, params=params, parse_dates=['datetime_utc']) @@ -155,14 +165,15 @@ class Resampler: for index, row in resampled_df.iterrows(): records_to_upsert.append(( index.strftime('%Y-%m-%d %H:%M:%S'), + int(index.timestamp() * 1000), # Generate timestamp_ms row['open'], row['high'], row['low'], row['close'], row['volume'], row['number_of_trades'] )) cursor = conn.cursor() cursor.executemany(f''' - INSERT OR REPLACE INTO "{target_table_name}" (datetime_utc, open, high, low, close, volume, number_of_trades) - VALUES (?, ?, ?, ?, ?, ?, ?) + INSERT OR REPLACE INTO "{target_table_name}" (datetime_utc, timestamp_ms, open, high, low, close, volume, number_of_trades) + VALUES (?, ?, ?, ?, ?, ?, ?, ?) ''', records_to_upsert) conn.commit() @@ -203,10 +214,11 @@ class Resampler: logging.info(f" - {tf_name:<10}: {total:,} candles") def _get_last_timestamp(self, conn, table_name): - """Gets the timestamp of the last entry in a table as a pandas Timestamp.""" + """Gets the millisecond timestamp of the last entry in a table.""" try: - timestamp_str = pd.read_sql(f'SELECT MAX(datetime_utc) FROM "{table_name}"', conn).iloc[0, 0] - return pd.to_datetime(timestamp_str) if timestamp_str else None + # --- FIX: Query for the integer timestamp_ms, not the text datetime_utc --- + timestamp_ms = pd.read_sql(f'SELECT MAX(timestamp_ms) FROM "{table_name}"', conn).iloc[0, 0] + return int(timestamp_ms) if pd.notna(timestamp_ms) else None except (pd.io.sql.DatabaseError, IndexError): return None From fe5cc8e1d1464fa054f1fa4d11500ed019667522 Mon Sep 17 00:00:00 2001 From: DiTus Date: Sat, 25 Oct 2025 19:58:52 +0200 Subject: [PATCH 07/18] market cap fixes --- coin_id_map.py | 45 ++++++++++++++------- del_market_cap_tables.py | 56 ++++++++++++++++++++++++++ live_market_utils.py | 85 +++++++++++++++++++++++++++++++++------- main_app.py | 31 +++++++++------ market_cap_fetcher.py | 11 ++++-- 5 files changed, 184 insertions(+), 44 deletions(-) create mode 100644 del_market_cap_tables.py diff --git a/coin_id_map.py b/coin_id_map.py index 3f4608d..91183bc 100644 --- a/coin_id_map.py +++ b/coin_id_map.py @@ -11,7 +11,7 @@ def update_coin_mapping(): """ Fetches all assets from Hyperliquid and all coins from CoinGecko, then creates and saves a mapping from the Hyperliquid symbol to the - CoinGecko ID. + CoinGecko ID using a robust matching algorithm. 
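+
+    Example of the saved map (illustrative entries, matching the manual
+    overrides below):
+        {"BTC": "bitcoin", "ETH": "ethereum", "HYPE": "hyperliquid"}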
""" setup_logging('normal', 'CoinMapUpdater') logging.info("Starting coin mapping update process...") @@ -20,13 +20,8 @@ def update_coin_mapping(): try: logging.info("Fetching assets from Hyperliquid...") info = Info(constants.MAINNET_API_URL, skip_ws=True) - # The meta object contains the 'universe' list with asset details meta, asset_contexts = info.meta_and_asset_ctxs() - - # --- FIX: The asset names are in the 'universe' list inside the meta object --- - # The 'universe' is a list of dictionaries, each with a 'name' - hyperliquid_assets = [asset['name'] for asset in meta['universe']] - + hyperliquid_assets = meta['universe'] logging.info(f"Found {len(hyperliquid_assets)} assets on Hyperliquid.") except Exception as e: logging.error(f"Failed to fetch assets from Hyperliquid: {e}") @@ -38,8 +33,11 @@ def update_coin_mapping(): response = requests.get("https://api.coingecko.com/api/v3/coins/list") response.raise_for_status() coingecko_coins = response.json() - # Create a lookup table: {symbol: id} - coingecko_lookup = {coin['symbol'].upper(): coin['id'] for coin in coingecko_coins} + + # Create more robust lookup tables + cg_symbol_lookup = {coin['symbol'].upper(): coin['id'] for coin in coingecko_coins} + cg_name_lookup = {coin['name'].upper(): coin['id'] for coin in coingecko_coins} + logging.info(f"Found {len(coingecko_coins)} coins on CoinGecko.") except requests.exceptions.RequestException as e: logging.error(f"Failed to fetch coin list from CoinGecko: {e}") @@ -47,24 +45,41 @@ def update_coin_mapping(): # --- 3. Create the mapping --- final_mapping = {} + # Use manual overrides for critical coins where symbols are ambiguous manual_overrides = { + "BTC": "bitcoin", + "ETH": "ethereum", + "SOL": "solana", + "BNB": "binancecoin", "HYPE": "hyperliquid", "PUMP": "pump-fun", "ASTER": "astar", + "ZEC": "zcash", + "SUI": "sui", + "ACE": "endurance", + # Add other important ones you watch here } logging.info("Generating symbol-to-id mapping...") - for asset_symbol in hyperliquid_assets: - # Check for manual overrides first + for asset in hyperliquid_assets: + asset_symbol = asset['name'].upper() + asset_name = asset.get('name', '').upper() # Use full name if available + + # Priority 1: Manual Overrides if asset_symbol in manual_overrides: final_mapping[asset_symbol] = manual_overrides[asset_symbol] continue - # Try to find a direct match in the CoinGecko lookup table - if asset_symbol in coingecko_lookup: - final_mapping[asset_symbol] = coingecko_lookup[asset_symbol] + # Priority 2: Exact Name Match + if asset_name in cg_name_lookup: + final_mapping[asset_symbol] = cg_name_lookup[asset_name] + continue + + # Priority 3: Symbol Match + if asset_symbol in cg_symbol_lookup: + final_mapping[asset_symbol] = cg_symbol_lookup[asset_symbol] else: - logging.warning(f"No direct match found for '{asset_symbol}' on CoinGecko. It will be excluded.") + logging.warning(f"No match found for '{asset_symbol}' on CoinGecko. It will be excluded.") # --- 4. Save the mapping to a file --- map_file_path = os.path.join("_data", "coin_id_map.json") diff --git a/del_market_cap_tables.py b/del_market_cap_tables.py new file mode 100644 index 0000000..d18050c --- /dev/null +++ b/del_market_cap_tables.py @@ -0,0 +1,56 @@ +import sqlite3 +import logging +import os + +from logging_utils import setup_logging + +def cleanup_market_cap_tables(): + """ + Scans the database and drops all tables related to market cap data + to allow for a clean refresh. 
+    """
+    setup_logging('normal', 'DBCleanup')
+    db_path = os.path.join("_data", "market_data.db")
+
+    if not os.path.exists(db_path):
+        logging.error(f"Database file not found at '{db_path}'. Nothing to clean.")
+        return
+
+    logging.info(f"Connecting to database at '{db_path}'...")
+    try:
+        with sqlite3.connect(db_path) as conn:
+            cursor = conn.cursor()
+
+            # Find all tables that were created by the market cap fetcher
+            cursor.execute("""
+                SELECT name FROM sqlite_master
+                WHERE type='table'
+                AND (name LIKE '%_market_cap' OR name LIKE 'TOTAL_%')
+            """)
+
+            tables_to_drop = cursor.fetchall()
+
+            if not tables_to_drop:
+                logging.info("No market cap tables found to clean up. Database is already clean.")
+                return
+
+            logging.warning(f"Found {len(tables_to_drop)} market cap tables to remove...")
+
+            for table in tables_to_drop:
+                table_name = table[0]
+                try:
+                    logging.info(f"Dropping table: {table_name}...")
+                    conn.execute(f'DROP TABLE IF EXISTS "{table_name}"')
+                except Exception as e:
+                    logging.error(f"Failed to drop table {table_name}: {e}")
+
+            conn.commit()
+            logging.info("--- Database cleanup complete ---")
+
+    except sqlite3.Error as e:
+        logging.error(f"A database error occurred: {e}")
+    except Exception as e:
+        logging.error(f"An unexpected error occurred: {e}")
+
+if __name__ == "__main__":
+    cleanup_market_cap_tables()
diff --git a/live_market_utils.py b/live_market_utils.py
index bc5246b..31aa3eb 100644
--- a/live_market_utils.py
+++ b/live_market_utils.py
@@ -1,11 +1,33 @@
 import logging
 import json
 import time
+import os
+import sys  # used by log_error's stderr fallback below
+import traceback
 
 from hyperliquid.info import Info
 from hyperliquid.utils import constants
 
 from logging_utils import setup_logging
 
+# --- Configuration for standalone error logging ---
+LOGS_DIR = "_logs"
+ERROR_LOG_FILE = os.path.join(LOGS_DIR, "live_market_errors.log")
+
+def log_error(error_message: str, include_traceback: bool = True):
+    """A simple, robust file logger for any errors."""
+    try:
+        if not os.path.exists(LOGS_DIR):
+            os.makedirs(LOGS_DIR)
+
+        with open(ERROR_LOG_FILE, 'a') as f:
+            timestamp = time.strftime('%Y-%m-%d %H:%M:%S', time.gmtime())
+            f.write(f"--- ERROR at {timestamp} UTC ---\n")
+            f.write(error_message + "\n")
+            if include_traceback:
+                f.write(traceback.format_exc() + "\n")
+            f.write("="*50 + "\n")
+    except Exception:
+        print(f"CRITICAL: Failed to write to error log file: {error_message}", file=sys.stderr)
+
 def on_message(message, shared_prices_dict):
     """
     Callback function to process incoming 'allMids' messages and update the
@@ -14,36 +36,71 @@ def on_message(message, shared_prices_dict):
     try:
         if message.get("channel") == "allMids":
             new_prices = message.get("data", {}).get("mids", {})
-            # Update the shared dictionary with the new price data
             shared_prices_dict.update(new_prices)
     except Exception as e:
-        # It's important to log errors inside the process
-        logging.error(f"Error in WebSocket on_message: {e}")
+        log_error(f"Error in WebSocket on_message: {e}")
 
 def start_live_feed(shared_prices_dict, log_level='off'):
     """
     Main function for the WebSocket process. It takes a shared dictionary
     and continuously feeds it with live market data.
+    Includes a watchdog to auto-reconnect on failure.
     """
     setup_logging(log_level, 'LiveMarketFeed')
 
-    # The Info object manages the WebSocket connection. 
- info = Info(constants.MAINNET_API_URL, skip_ws=False) - - # We need to wrap the callback in a lambda to pass our shared dictionary + info = None callback = lambda msg: on_message(msg, shared_prices_dict) - # Subscribe to the allMids channel - subscription = {"type": "allMids"} - info.subscribe(subscription, callback) - logging.info("Subscribed to 'allMids' for live mark prices.") + def connect_and_subscribe(): + """Establishes a new WebSocket connection and subscribes to allMids.""" + try: + logging.info("Connecting to Hyperliquid WebSocket...") + # Ensure skip_ws=False to create the ws_manager + new_info = Info(constants.MAINNET_API_URL, skip_ws=False) + subscription = {"type": "allMids"} + new_info.subscribe(subscription, callback) + logging.info("WebSocket connected and subscribed to 'allMids'.") + return new_info + except Exception as e: + log_error(f"Failed to connect to WebSocket: {e}") + return None + + info = connect_and_subscribe() logging.info("Starting live price feed process. Press Ctrl+C in main app to stop.") + try: - # The background thread in the SDK handles messages. This loop just keeps the process alive. while True: - time.sleep(1) + # --- Watchdog Logic --- + time.sleep(15) # Check the connection every 15 seconds + + if info is None or not info.ws_manager.is_running(): + # --- FIX: Log this critical failure to the persistent error log --- + error_msg = "WebSocket connection lost or not running. Attempting to reconnect..." + logging.warning(error_msg) + log_error(error_msg, include_traceback=False) # Log it to the file + + if info: + try: + info.ws_manager.stop() # Clean up old manager + except Exception as e: + log_error(f"Error stopping old ws_manager: {e}") + + info = connect_and_subscribe() + + if info is None: + logging.error("Reconnect failed, will retry in 15s.") + else: + logging.info("Successfully reconnected to WebSocket.") + else: + logging.debug("Watchdog check: WebSocket connection is active.") + except KeyboardInterrupt: logging.info("Stopping WebSocket listener...") - info.ws_manager.stop() + except Exception as e: + log_error(f"Live Market Feed process crashed: {e}") + finally: + if info and info.ws_manager: + info.ws_manager.stop() logging.info("Listener stopped.") + diff --git a/main_app.py b/main_app.py index aca61fb..32048c4 100644 --- a/main_app.py +++ b/main_app.py @@ -11,11 +11,12 @@ import pandas as pd from datetime import datetime, timezone from logging_utils import setup_logging -# --- Using the high-performance WebSocket utility for live prices --- +# --- Using the new high-performance WebSocket utility for live prices --- from live_market_utils import start_live_feed # --- Configuration --- WATCHED_COINS = ["BTC", "ETH", "SOL", "BNB", "HYPE", "ASTER", "ZEC", "PUMP", "SUI"] +# --- FIX: Replaced old data_fetcher with the new live_candle_fetcher --- LIVE_CANDLE_FETCHER_SCRIPT = "live_candle_fetcher.py" RESAMPLER_SCRIPT = "resampler.py" MARKET_CAP_FETCHER_SCRIPT = "market_cap_fetcher.py" @@ -26,9 +27,6 @@ MARKET_CAP_SUMMARY_FILE = os.path.join("_data", "market_cap_data.json") LOGS_DIR = "_logs" TRADE_EXECUTOR_STATUS_FILE = os.path.join(LOGS_DIR, "trade_executor_status.json") -# --- ADDED: Standard list of timeframes for the resampler to generate --- -STANDARD_RESAMPLING_TIMEFRAMES = ["3m", "5m", "15m", "30m", "37m", "148m", "1h", "2h", "4h", "8h", "12h", "1d", "3d", "1w", "1M"] - def format_market_cap(mc_value): """Formats a large number into a human-readable market cap string.""" @@ -49,6 +47,7 @@ def run_live_candle_fetcher(): while True: try: 
with open(log_file, 'a') as f: + # We can't get coins from strategies.json here, so we pass the default list command = [sys.executable, LIVE_CANDLE_FETCHER_SCRIPT, "--coins"] + WATCHED_COINS + ["--log-level", "off"] f.write(f"\n--- Starting {LIVE_CANDLE_FETCHER_SCRIPT} at {datetime.now()} ---\n") subprocess.run(command, check=True, stdout=f, stderr=subprocess.STDOUT) @@ -73,13 +72,12 @@ def run_resampler_job(timeframes_to_generate: list): f.write(f"Failed to run resampler.py job: {e}\n") -def resampler_scheduler(): - """Schedules the resampler.py script to run at the start of every minute.""" +def resampler_scheduler(timeframes_to_generate: list): + """Schedules the resampler.py script.""" setup_logging('off', 'ResamplerScheduler') - # Run once at startup - run_resampler_job(STANDARD_RESAMPLING_TIMEFRAMES) + run_resampler_job(timeframes_to_generate) # Schedule to run every minute at the :01 second mark - schedule.every().minute.at(":01").do(run_resampler_job, timeframes_to_generate=STANDARD_RESAMPLING_TIMEFRAMES) + schedule.every().minute.at(":01").do(run_resampler_job, timeframes_to_generate=timeframes_to_generate) logging.info("Resampler scheduled to run every minute at :01.") while True: schedule.run_pending() @@ -90,7 +88,7 @@ def run_market_cap_fetcher_job(): """Defines the job for the market cap fetcher, redirecting output.""" log_file = os.path.join(LOGS_DIR, "market_cap_fetcher.log") try: - command = [sys.executable, MARKET_CAP_FETCHER_SCRIPT, "--coins"] + WATCHED_COINS + ["--log-level", "off"] + command = [sys.executable, MARKET_CAP_FETCHER_SCRIPT, "--log-level", "off"] with open(log_file, 'a') as f: f.write(f"\n--- Starting {MARKET_CAP_FETCHER_SCRIPT} job at {datetime.now()} ---\n") subprocess.run(command, check=True, stdout=f, stderr=subprocess.STDOUT) @@ -319,13 +317,22 @@ if __name__ == "__main__": logging.error(f"Could not load strategies from '{STRATEGY_CONFIG_FILE}': {e}") sys.exit(1) + required_timeframes = set() + for name, config in strategy_configs.items(): + if config.get("enabled", False): + tf = config.get("parameters", {}).get("timeframe") + if tf: + required_timeframes.add(tf) + + if not required_timeframes: + logging.warning("No timeframes required by any enabled strategy.") + with multiprocessing.Manager() as manager: shared_prices = manager.dict() processes["Live Market Feed"] = multiprocessing.Process(target=start_live_feed, args=(shared_prices, 'off'), daemon=True) processes["Live Candle Fetcher"] = multiprocessing.Process(target=run_live_candle_fetcher, daemon=True) - # --- FIX: The resampler now uses a fixed list of TFs and a new schedule --- - processes["Resampler"] = multiprocessing.Process(target=resampler_scheduler, daemon=True) + processes["Resampler"] = multiprocessing.Process(target=resampler_scheduler, args=(list(required_timeframes),), daemon=True) processes["Market Cap Fetcher"] = multiprocessing.Process(target=market_cap_fetcher_scheduler, daemon=True) processes["Trade Executor"] = multiprocessing.Process(target=run_trade_executor, daemon=True) diff --git a/market_cap_fetcher.py b/market_cap_fetcher.py index 2557828..9b26fa5 100644 --- a/market_cap_fetcher.py +++ b/market_cap_fetcher.py @@ -41,7 +41,6 @@ class MarketCapFetcher: "DAI": "dai", "PYUSD": "paypal-usd" } - # --- ADDED: Ensure all tables have the correct schema --- self._ensure_tables_exist() def _ensure_tables_exist(self): @@ -291,8 +290,14 @@ class MarketCapFetcher: if not market_caps: return pd.DataFrame() df = pd.DataFrame(market_caps, columns=['timestamp_ms', 'market_cap']) - # --- 
FIX: Convert to datetime object, but do not format as string --- - df['datetime_utc'] = pd.to_datetime(df['timestamp_ms'], unit='ms') + + # --- FIX: Normalize all timestamps to the start of the day (00:00:00 UTC) --- + # This prevents duplicate entries for the same day (e.g., a "live" candle vs. the daily one) + df['datetime_utc'] = pd.to_datetime(df['timestamp_ms'], unit='ms').dt.normalize() + + # Recalculate the timestamp_ms to match the normalized 00:00:00 datetime + df['timestamp_ms'] = (df['datetime_utc'].astype('int64') // 10**6) + df.drop_duplicates(subset=['timestamp_ms'], keep='last', inplace=True) return df[['datetime_utc', 'timestamp_ms', 'market_cap']] From 76a858a7df7e6ba1c84474965eaf13f690644600 Mon Sep 17 00:00:00 2001 From: DiTus Date: Sat, 25 Oct 2025 19:59:13 +0200 Subject: [PATCH 08/18] detailed info about wallets --- wallet_data.py | 64 ++++++++++++++++++++------------------------------ 1 file changed, 26 insertions(+), 38 deletions(-) diff --git a/wallet_data.py b/wallet_data.py index 0d0a5ab..0188724 100644 --- a/wallet_data.py +++ b/wallet_data.py @@ -1,6 +1,6 @@ #!/usr/bin/env python3 """ -Hyperliquid Wallet Data Fetcher - Perfect Table Alignment +Hyperliquid Wallet Data Fetcher - FINAL Perfect Alignment ========================================================== Complete Python script to pull all available data for a Hyperliquid wallet via API. @@ -189,18 +189,19 @@ class HyperliquidWalletAnalyzer: def print_positions_summary_table(self, positions: list): """ - Print a summary table of all positions with properly aligned vertical separators. + Print a summary table of all positions with perfectly aligned columns. + NO emojis in data cells - keeps them simple text only for perfect alignment. Args: positions: List of position dictionaries """ print(f"\n{'='*130}") - print(f"POSITIONS SUMMARY TABLE") - print(f"{'='*130}") + print("POSITIONS SUMMARY TABLE") + print('='*130) - # Header with vertical separators - print("| Asset | Side | Size | Entry Price | Position Value | Unrealized PnL | ROE | Leverage |") - print("|----------|-----------|------------------|------------------|--------------------|--------------------|------------|------------|") + # Print header + print("| Asset | Side | Size | Entry Price | Position Value | Unrealized PnL | ROE | Leverage |") + print("|----------|-------|-------------------|-------------------|-------------------|-------------------|------------|------------|") total_position_value = 0 total_pnl = 0 @@ -220,53 +221,40 @@ class HyperliquidWalletAnalyzer: leverage_value = leverage.get('value', 0) if isinstance(leverage, dict) else 0 leverage_type = leverage.get('type', 'cross') if isinstance(leverage, dict) else 'cross' + # Determine side - NO EMOJIS in data side_text = "LONG" if size > 0 else "SHORT" - side_emoji = "šŸ“ˆ" if size > 0 else "šŸ“‰" - # Add color indicators (using text instead of emojis for alignment) + # Format PnL and ROE with signs pnl_sign = "+" if unrealized_pnl >= 0 else "" + roe_sign = "+" if return_on_equity >= 0 else "" # Accumulate totals total_position_value += abs(position_value) total_pnl += unrealized_pnl - # Format numbers with proper width - no emojis in the data - size_str = f"{abs(size):,.4f}" - entry_str = f"${entry_px:,.2f}" - value_str = f"${abs(position_value):,.2f}" - pnl_str = f"{pnl_sign}${unrealized_pnl:,.2f}" - roe_str = f"{return_on_equity:+.2%}" + # Format all values as strings with proper width + asset_str = f"{coin[:8]:<8}" + side_str = f"{side_text:<5}" + size_str = f"{abs(size):>17,.4f}" + 
entry_str = f"${entry_px:>16,.2f}" + value_str = f"${abs(position_value):>16,.2f}" + pnl_str = f"{pnl_sign}${unrealized_pnl:>15,.2f}" + roe_str = f"{roe_sign}{return_on_equity:>9.2%}" lev_str = f"{leverage_value}x {leverage_type[:4]}" - # Use fixed width with ljust/rjust for proper alignment - row = (f"| {coin[:8]:<8} " - f"| {side_text:<5} {side_emoji} " - f"| {size_str:>16} " - f"| {entry_str:>16} " - f"| {value_str:>18} " - f"| {pnl_str:>18} " - f"| {roe_str:>10} " - f"| {lev_str:<10} |") - print(row) + # Print row with exact spacing + print(f"| {asset_str} | {side_str} | {size_str} | {entry_str} | {value_str} | {pnl_str} | {roe_str} | {lev_str:<10} |") # Separator before totals - print("|==========|===========|==================|==================|====================|====================|============|============|") + print("|==========|=======|===================|===================|===================|===================|============|============|") # Total row - total_value_str = f"${total_position_value:,.2f}" + total_value_str = f"${total_position_value:>16,.2f}" total_pnl_sign = "+" if total_pnl >= 0 else "" - total_pnl_str = f"{total_pnl_sign}${total_pnl:,.2f}" + total_pnl_str = f"{total_pnl_sign}${total_pnl:>15,.2f}" - total_row = (f"| {'TOTAL':<8} " - f"| {'':<9} " - f"| {'':<16} " - f"| {'':<16} " - f"| {total_value_str:>18} " - f"| {total_pnl_str:>18} " - f"| {'':<10} " - f"| {'':<10} |") - print(total_row) - print(f"{'='*130}\n") + print(f"| TOTAL | | | | {total_value_str} | {total_pnl_str} | | |") + print('='*130 + '\n') def get_spot_state(self) -> Dict[str, Any]: """ From 541a71d2a623e9b7c7d72b09c93fbe1099fd678e Mon Sep 17 00:00:00 2001 From: DiTus Date: Sat, 25 Oct 2025 21:51:25 +0200 Subject: [PATCH 09/18] new strategies --- live_market_utils.py | 6 +- main_app.py | 97 +++++++++++++++--- position_monitor.py | 159 ++++++++++++++++++++++++++++++ strategies/base_strategy.py | 41 +++++--- strategies/ma_cross_strategy.py | 26 ++--- strategies/single_sma_strategy.py | 14 +-- 6 files changed, 291 insertions(+), 52 deletions(-) create mode 100644 position_monitor.py diff --git a/live_market_utils.py b/live_market_utils.py index 31aa3eb..ae0eab3 100644 --- a/live_market_utils.py +++ b/live_market_utils.py @@ -74,13 +74,13 @@ def start_live_feed(shared_prices_dict, log_level='off'): # --- Watchdog Logic --- time.sleep(15) # Check the connection every 15 seconds - if info is None or not info.ws_manager.is_running(): - # --- FIX: Log this critical failure to the persistent error log --- + # --- FIX: Changed 'is_running()' to the correct method 'is_alive()' --- + if info is None or not info.ws_manager.is_alive(): error_msg = "WebSocket connection lost or not running. Attempting to reconnect..." 
logging.warning(error_msg) log_error(error_msg, include_traceback=False) # Log it to the file - if info: + if info and info.ws_manager: # Check if ws_manager exists before stopping try: info.ws_manager.stop() # Clean up old manager except Exception as e: diff --git a/main_app.py b/main_app.py index 32048c4..b425701 100644 --- a/main_app.py +++ b/main_app.py @@ -9,10 +9,13 @@ import schedule import sqlite3 import pandas as pd from datetime import datetime, timezone +import importlib from logging_utils import setup_logging # --- Using the new high-performance WebSocket utility for live prices --- from live_market_utils import start_live_feed +# --- Import the base class for type hinting (optional but good practice) --- +from strategies.base_strategy import BaseStrategy # --- Configuration --- WATCHED_COINS = ["BTC", "ETH", "SOL", "BNB", "HYPE", "ASTER", "ZEC", "PUMP", "SUI"] @@ -108,21 +111,70 @@ def market_cap_fetcher_scheduler(): def run_strategy(strategy_name: str, config: dict): - """Target function to run a strategy, redirecting its output to a log file.""" - log_file = os.path.join(LOGS_DIR, f"strategy_{strategy_name}.log") - script_name = config['script'] - command = [sys.executable, script_name, "--name", strategy_name, "--log-level", "normal"] + """ + This function BECOMES the strategy runner. It is executed as a separate + process by multiprocessing. + """ + # These imports only happen in the new, lightweight process + import importlib + import os + import sys + import time + import logging + from logging_utils import setup_logging + from strategies.base_strategy import BaseStrategy + + # --- Setup logging to file for this specific process --- + log_file_path = os.path.join(LOGS_DIR, f"strategy_{strategy_name}.log") + try: + # Redirect stdout and stderr of this process to its log file + sys.stdout = open(log_file_path, 'a') + sys.stderr = sys.stdout + except Exception as e: + print(f"Failed to open log file for {strategy_name}: {e}") + + # Setup logging *within this process* + setup_logging('normal', f"Strategy-{strategy_name}") + + # --- Main resilient loop (was previously in main_app) --- while True: try: - with open(log_file, 'a') as f: - f.write(f"\n--- Starting strategy '{strategy_name}' at {datetime.now()} ---\n") - subprocess.run(command, check=True, stdout=f, stderr=subprocess.STDOUT) - except (subprocess.CalledProcessError, Exception) as e: - with open(log_file, 'a') as f: - f.write(f"\n--- PROCESS ERROR at {datetime.now()} ---\n") - f.write(f"Strategy '{strategy_name}' failed: {e}. Restarting...\n") + logging.info(f"--- Starting strategy '{strategy_name}' ---") + + # 1. Load the strategy class + if 'class' not in config: + logging.error(f"Strategy config for '{strategy_name}' is missing the 'class' key. Exiting.") + return + + module_path, class_name = config['class'].rsplit('.', 1) + module = importlib.import_module(module_path) + StrategyClass = getattr(module, class_name) + strategy = StrategyClass(strategy_name, config['parameters']) # Log level is now handled here + + # 2. Run the strategy's logic loop + logging.info(f"Starting main logic loop for {strategy.coin} on {strategy.timeframe}.") + while True: + df = strategy.load_data() + if df.empty: + logging.warning("No data loaded. 
Waiting 1 minute...") + time.sleep(60) + continue + + strategy.calculate_signals_and_state(df.copy()) + strategy._save_status() + + logging.info(f"Current Signal: {strategy.current_signal}") + time.sleep(60) # Simple 1-minute wait + + except KeyboardInterrupt: + logging.info("Strategy process stopping.") + return # Exit the outer loop on Ctrl+C + except Exception as e: + logging.error(f"Strategy '{strategy_name}' failed: {e}", exc_info=True) + logging.info("Restarting strategy in 10 seconds...") time.sleep(10) + def run_trade_executor(): """Target function to run the trade_executor.py script in a resilient loop.""" log_file = os.path.join(LOGS_DIR, "trade_executor.log") @@ -207,10 +259,22 @@ class MainApp: left_table_lines.append(f"{'#':<2} | {'Coin':^6} | {'Live Price':>10} | {'Market Cap':>15} |") left_table_lines.append("-" * left_table_width) for i, coin in enumerate(self.watched_coins, 1): - price = self.prices.get(coin, "Loading...") + price_str = self.prices.get(coin, "Loading...") + # Format the price string + try: + price_float = float(price_str) + if price_float < 1: + price_str = f"{price_float:>10.6f}" + elif price_float < 100: + price_str = f"{price_float:>10.4f}" + else: + price_str = f"{price_float:>10.2f}" + except (ValueError, TypeError): + price_str = f"{'Loading...':>10}" + market_cap = self.market_caps.get(coin) formatted_mc = format_market_cap(market_cap) - left_table_lines.append(f"{i:<2} | {coin:^6} | {price:>10} | {formatted_mc:>15} |") + left_table_lines.append(f"{i:<2} | {coin:^6} | {price_str} | {formatted_mc:>15} |") left_table_lines.append("-" * left_table_width) right_table_lines = ["--- Strategy Status ---"] @@ -234,7 +298,7 @@ class MainApp: timeframe = config_params.get('timeframe', 'N/A') size = config_params.get('size', 'N/A') - other_params = {k: v for k, v in config_params.items() if k not in ['coin', 'timeframe', 'size']} + other_params = {k: v for k, v in config.get('parameters', {}).items() if k not in ['coin', 'timeframe', 'size']} params_str = ", ".join([f"{k}={v}" for k, v in other_params.items()]) right_table_lines.append(f"{i:^2} | {name:<25} | {coin:^6} | {signal:^8} | {price_display:>12} | {last_change_display:>17} | {timeframe:^5} | {size:>8} | {params_str:<45} |") right_table_lines.append("-" * right_table_width) @@ -338,8 +402,9 @@ if __name__ == "__main__": for name, config in strategy_configs.items(): if config.get("enabled", False): - if not os.path.exists(config['script']): - logging.error(f"Strategy script '{config['script']}' for '{name}' not found. Skipping.") + # --- FIX: Check for the 'class' key, not the 'script' key --- + if 'class' not in config: + logging.error(f"Strategy '{name}' is missing 'class' key. Skipping.") continue proc = multiprocessing.Process(target=run_strategy, args=(name, config), daemon=True) processes[f"Strategy: {name}"] = proc diff --git a/position_monitor.py b/position_monitor.py new file mode 100644 index 0000000..e20ec5d --- /dev/null +++ b/position_monitor.py @@ -0,0 +1,159 @@ +import os +import sys +import time +import json +import argparse +from datetime import datetime, timezone +from hyperliquid.info import Info +from hyperliquid.utils import constants +from dotenv import load_dotenv +import logging + +from logging_utils import setup_logging + +# Load .env file +load_dotenv() + +class PositionMonitor: + """ + A standalone, read-only dashboard for monitoring all open perpetuals + positions, spot balances, and their associated strategies. 
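+
+    The strategy-to-coin ownership map is read from
+    _data/executor_managed_positions.json, which (per the reverse mapping
+    built below) is assumed to have the shape
+        {"<strategy_name>": {"coin": "BTC", ...}, ...}
+    where "<strategy_name>" is a placeholder.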
+ """ + + def __init__(self, log_level: str): + setup_logging(log_level, 'PositionMonitor') + + self.wallet_address = os.environ.get("MAIN_WALLET_ADDRESS") + if not self.wallet_address: + logging.error("MAIN_WALLET_ADDRESS not set in .env file. Cannot proceed.") + sys.exit(1) + + self.info = Info(constants.MAINNET_API_URL, skip_ws=True) + self.managed_positions_path = os.path.join("_data", "executor_managed_positions.json") + self._lines_printed = 0 + + logging.info(f"Monitoring vault address: {self.wallet_address}") + + def load_managed_positions(self) -> dict: + """Loads the state of which strategy manages which position.""" + if os.path.exists(self.managed_positions_path): + try: + with open(self.managed_positions_path, 'r') as f: + # Create a reverse map: {coin: strategy_name} + data = json.load(f) + return {v['coin']: k for k, v in data.items()} + except (IOError, json.JSONDecodeError): + logging.warning("Could not read managed positions file.") + return {} + + def run(self): + """Main loop to continuously refresh the dashboard.""" + try: + while True: + self.display_dashboard() + time.sleep(5) # Refresh every 5 seconds + except KeyboardInterrupt: + logging.info("Position monitor stopped.") + + def display_dashboard(self): + """Fetches all data and draws the dashboard without blinking.""" + if self._lines_printed > 0: + print(f"\x1b[{self._lines_printed}A", end="") + + output_lines = [] + try: + perp_state = self.info.user_state(self.wallet_address) + spot_state = self.info.spot_user_state(self.wallet_address) + coin_to_strategy_map = self.load_managed_positions() + + output_lines.append(f"--- Live Position Monitor for {self.wallet_address[:6]}...{self.wallet_address[-4:]} ---") + + # --- 1. Perpetuals Account Summary --- + margin_summary = perp_state.get('marginSummary', {}) + account_value = float(margin_summary.get('accountValue', 0)) + margin_used = float(margin_summary.get('totalMarginUsed', 0)) + utilization = (margin_used / account_value) * 100 if account_value > 0 else 0 + + output_lines.append("\n--- Perpetuals Account Summary ---") + output_lines.append(f" Account Value: ${account_value:,.2f} | Margin Used: ${margin_used:,.2f} | Utilization: {utilization:.2f}%") + + # --- 2. Spot Balances Summary --- + output_lines.append("\n--- Spot Balances ---") + spot_balances = spot_state.get('balances', []) + if not spot_balances: + output_lines.append(" No spot balances found.") + else: + balances_str = ", ".join([f"{b.get('coin')}: {float(b.get('total', 0)):,.4f}" for b in spot_balances if float(b.get('total', 0)) > 0]) + output_lines.append(f" {balances_str}") + + # --- 3. 
Open Positions Table --- + output_lines.append("\n--- Open Perpetual Positions ---") + positions = perp_state.get('assetPositions', []) + open_positions = [p for p in positions if p.get('position') and float(p['position'].get('szi', 0)) != 0] + + if not open_positions: + output_lines.append(" No open perpetual positions found.") + output_lines.append("") # Add a line for stable refresh + else: + self.build_positions_table(open_positions, coin_to_strategy_map, output_lines) + + except Exception as e: + output_lines = [f"An error occurred: {e}"] + + final_output = "\n".join(output_lines) + "\n\x1b[J" # \x1b[J clears to end of screen + print(final_output, end="") + + self._lines_printed = len(output_lines) + sys.stdout.flush() + + def build_positions_table(self, positions: list, coin_to_strategy_map: dict, output_lines: list): + """Builds the text for the positions summary table.""" + header = f"| {'Strategy':<25} | {'Coin':<6} | {'Side':<5} | {'Size':>15} | {'Entry Price':>12} | {'Mark Price':>12} | {'PNL':>15} | {'Leverage':>10} |" + output_lines.append(header) + output_lines.append("-" * len(header)) + + for position in positions: + pos = position.get('position', {}) + coin = pos.get('coin', 'Unknown') + size = float(pos.get('szi', 0)) + entry_px = float(pos.get('entryPx', 0)) + mark_px = float(pos.get('markPx', 0)) + unrealized_pnl = float(pos.get('unrealizedPnl', 0)) + + # Get leverage + position_value = float(pos.get('positionValue', 0)) + margin_used = float(pos.get('marginUsed', 0)) + leverage = (position_value / margin_used) if margin_used > 0 else 0 + + side_text = "LONG" if size > 0 else "SHORT" + pnl_sign = "+" if unrealized_pnl >= 0 else "" + + # Find the strategy that owns this coin + strategy_name = coin_to_strategy_map.get(coin, "Unmanaged") + + # Format all values as strings + strategy_str = f"{strategy_name:<25}" + coin_str = f"{coin:<6}" + side_str = f"{side_text:<5}" + size_str = f"{size:>15.4f}" + entry_str = f"${entry_px:>11,.2f}" + mark_str = f"${mark_px:>11,.2f}" + pnl_str = f"{pnl_sign}${unrealized_pnl:>14,.2f}" + lev_str = f"{leverage:>9.1f}x" + + output_lines.append(f"| {strategy_str} | {coin_str} | {side_str} | {size_str} | {entry_str} | {mark_str} | {pnl_str} | {lev_str} |") + + output_lines.append("-" * len(header)) + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Monitor a Hyperliquid wallet's positions in real-time.") + parser.add_argument( + "--log-level", + default="normal", + choices=['off', 'normal', 'debug'], + help="Set the logging level for the script." + ) + args = parser.parse_args() + + monitor = PositionMonitor(log_level=args.log_level) + monitor.run() diff --git a/strategies/base_strategy.py b/strategies/base_strategy.py index e40452c..ebeb063 100644 --- a/strategies/base_strategy.py +++ b/strategies/base_strategy.py @@ -6,13 +6,16 @@ import logging from datetime import datetime, timezone import sqlite3 +from logging_utils import setup_logging + class BaseStrategy(ABC): """ An abstract base class that defines the blueprint for all trading strategies. - It provides common functionality like loading data and saving status. + It provides common functionality like loading data, saving status, and state management. 
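+
+    Subclasses implement calculate_signals(), which must hand back the input
+    DataFrame with an integer 'signal' column (1 = long, -1 = short, 0 = flat);
+    calculate_signals_and_state() below derives BUY/SELL/HOLD from that column.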
""" - def __init__(self, strategy_name: str, params: dict, log_level: str): + def __init__(self, strategy_name: str, params: dict): + # Note: log_level is not needed here as logging is set up by the process self.strategy_name = strategy_name self.params = params self.coin = params.get("coin", "N/A") @@ -20,21 +23,17 @@ class BaseStrategy(ABC): self.db_path = os.path.join("_data", "market_data.db") self.status_file_path = os.path.join("_data", f"strategy_status_{self.strategy_name}.json") - # --- ADDED: State variables required for status reporting --- self.current_signal = "INIT" self.last_signal_change_utc = None self.signal_price = None - # This will be set up by the child class after it's initialized - # setup_logging(log_level, f"Strategy-{self.strategy_name}") - # logging.info(f"Initializing with parameters: {self.params}") + logging.info(f"Initializing with parameters: {self.params}") def load_data(self) -> pd.DataFrame: """Loads historical data for the configured coin and timeframe.""" table_name = f"{self.coin}_{self.timeframe}" - # Dynamically determine the number of candles needed based on all possible period parameters - periods = [v for k, v in self.params.items() if 'period' in k or '_ma' in k or 'slow' in k] + periods = [v for k, v in self.params.items() if 'period' in k or '_ma' in k or 'slow' in k or 'fast' in k] limit = max(periods) + 50 if periods else 500 try: @@ -51,10 +50,30 @@ class BaseStrategy(ABC): @abstractmethod def calculate_signals(self, df: pd.DataFrame) -> pd.DataFrame: - """ - The core logic of the strategy. Must be implemented by child classes. - """ + """The core logic of the strategy. Must be implemented by child classes.""" pass + + def calculate_signals_and_state(self, df: pd.DataFrame): + """ + A wrapper that calls the strategy's signal calculation and then + determines the last signal change from the historical data. + """ + df_with_signals = self.calculate_signals(df) + df_with_signals.dropna(inplace=True) + if df_with_signals.empty: return + + df_with_signals['position_change'] = df_with_signals['signal'].diff() + + last_signal = df_with_signals['signal'].iloc[-1] + if last_signal == 1: self.current_signal = "BUY" + elif last_signal == -1: self.current_signal = "SELL" + else: self.current_signal = "HOLD" + + last_change_series = df_with_signals[df_with_signals['position_change'] != 0] + if not last_change_series.empty: + last_change_row = last_change_series.iloc[-1] + self.last_signal_change_utc = last_change_row.name.tz_localize('UTC').isoformat() + self.signal_price = last_change_row['close'] def _save_status(self): """Saves the current strategy state to its JSON file.""" diff --git a/strategies/ma_cross_strategy.py b/strategies/ma_cross_strategy.py index 0002375..3effd1e 100644 --- a/strategies/ma_cross_strategy.py +++ b/strategies/ma_cross_strategy.py @@ -7,29 +7,23 @@ class MaCrossStrategy(BaseStrategy): A strategy based on a fast Simple Moving Average (SMA) crossing a slow SMA. """ - def calculate_signals(self, df: pd.DataFrame) -> pd.DataFrame: - # Support multiple naming conventions: some configs use 'fast'/'slow' - # while others use 'short_ma'/'long_ma'. Normalize here so both work. - fast_ma_period = self.params.get('short_ma') or self.params.get('fast') or 0 - slow_ma_period = self.params.get('long_ma') or self.params.get('slow') or 0 - - # If parameters are missing, return a neutral signal frame. 
-        if not fast_ma_period or not slow_ma_period:
-            logging.warning(f"Missing MA period parameters (fast={fast_ma_period}, slow={slow_ma_period}).")
-            df['signal'] = 0
-            return df
+    def __init__(self, strategy_name: str, params: dict, log_level: str):
+        super().__init__(strategy_name, params)
+        self.fast_ma_period = self.params.get('short_ma') or self.params.get('fast') or 0
+        self.slow_ma_period = self.params.get('long_ma') or self.params.get('slow') or 0
 
-        if len(df) < slow_ma_period:
-            logging.warning(f"Not enough data for MA periods {fast_ma_period}/{slow_ma_period}. Need {slow_ma_period}, have {len(df)}.")
+    def calculate_signals(self, df: pd.DataFrame) -> pd.DataFrame:
+        if not self.fast_ma_period or not self.slow_ma_period or len(df) < self.slow_ma_period:
+            logging.warning(f"Missing or insufficient data for MA periods (fast={self.fast_ma_period}, slow={self.slow_ma_period}, rows={len(df)}).")
             df['signal'] = 0
             return df
 
-        df['fast_sma'] = df['close'].rolling(window=fast_ma_period).mean()
-        df['slow_sma'] = df['close'].rolling(window=slow_ma_period).mean()
+        df['fast_sma'] = df['close'].rolling(window=self.fast_ma_period).mean()
+        df['slow_sma'] = df['close'].rolling(window=self.slow_ma_period).mean()
 
-        # Signal is 1 for Golden Cross (fast > slow), -1 for Death Cross
         df['signal'] = 0
         df.loc[df['fast_sma'] > df['slow_sma'], 'signal'] = 1
         df.loc[df['fast_sma'] < df['slow_sma'], 'signal'] = -1
 
         return df
+
diff --git a/strategies/single_sma_strategy.py b/strategies/single_sma_strategy.py
index 52a1d33..4dd2ddb 100644
--- a/strategies/single_sma_strategy.py
+++ b/strategies/single_sma_strategy.py
@@ -6,19 +6,21 @@ class SingleSmaStrategy(BaseStrategy):
     """
     A strategy based on the price crossing a single Simple Moving Average (SMA).
     """
+    def __init__(self, strategy_name: str, params: dict):
+        super().__init__(strategy_name, params)
+        self.sma_period = self.params.get('sma_period', 0)
+
     def calculate_signals(self, df: pd.DataFrame) -> pd.DataFrame:
-        sma_period = self.params.get('sma_period', 0)
-
-        if not sma_period or len(df) < sma_period:
-            logging.warning(f"Not enough data for SMA period {sma_period}. 
Need {sma_period}, have {len(df)}.") + if not self.sma_period or len(df) < self.sma_period: + logging.warning(f"Not enough data for SMA period {self.sma_period}.") df['signal'] = 0 return df - df['sma'] = df['close'].rolling(window=sma_period).mean() + df['sma'] = df['close'].rolling(window=self.sma_period).mean() - # Signal is 1 when price is above SMA, -1 when below df['signal'] = 0 df.loc[df['close'] > df['sma'], 'signal'] = 1 df.loc[df['close'] < df['sma'], 'signal'] = -1 return df + From 93363750aea07a19753b74bdf4d758205866cf5f Mon Sep 17 00:00:00 2001 From: DiTus Date: Mon, 27 Oct 2025 21:54:33 +0100 Subject: [PATCH 10/18] fixes, old way to handle strategies --- address_monitor.py | 4 +- dashboard_data_fetcher.py | 136 +++++++++++ main_app.py | 172 +++++++++----- strategies/base_strategy.py | 112 +++++++-- strategies/copy_trader_strategy.py | 178 ++++++++++++++ strategies/ma_cross_strategy.py | 7 +- strategies/single_sma_strategy.py | 7 +- trade_executor.py | 283 +++++++++++++--------- whale_tracker.py | 367 +++++++++++++++++++++++++++++ 9 files changed, 1063 insertions(+), 203 deletions(-) create mode 100644 dashboard_data_fetcher.py create mode 100644 strategies/copy_trader_strategy.py create mode 100644 whale_tracker.py diff --git a/address_monitor.py b/address_monitor.py index 9bd768a..ced5e44 100644 --- a/address_monitor.py +++ b/address_monitor.py @@ -15,13 +15,13 @@ from logging_utils import setup_logging # --- Configuration --- DEFAULT_ADDRESSES_TO_WATCH = [ #"0xd4c1f7e8d876c4749228d515473d36f919583d1d", - "0x0fd468a73084daa6ea77a9261e40fdec3e67e0c7", + "0x47930c76790c865217472f2ddb4d14c640ee450a", # "0x4d69495d16fab95c3c27b76978affa50301079d0", # "0x09bc1cf4d9f0b59e1425a8fde4d4b1f7d3c9410d", "0xc6ac58a7a63339898aeda32499a8238a46d88e84", "0xa8ef95dbd3db55911d3307930a84b27d6e969526", # "0x4129c62faf652fea61375dcd9ca8ce24b2bb8b95", - "0xbf1935fe7ab6d0aa3ee8d3da47c2f80e215b2a1c", + "0x32885a6adac4375858E6edC092EfDDb0Ef46484C", ] MAX_FILLS_TO_DISPLAY = 10 LOGS_DIR = "_logs" diff --git a/dashboard_data_fetcher.py b/dashboard_data_fetcher.py new file mode 100644 index 0000000..789fbeb --- /dev/null +++ b/dashboard_data_fetcher.py @@ -0,0 +1,136 @@ +import logging +import os +import sys +import json +import time +import argparse # <-- THE FIX: Added this import +from datetime import datetime +from eth_account import Account +from hyperliquid.info import Info +from hyperliquid.utils import constants +from dotenv import load_dotenv + +from logging_utils import setup_logging + +# Load .env file +load_dotenv() + +class DashboardDataFetcher: + """ + A dedicated, lightweight process that runs in a loop to fetch and save + the account's state (balances, positions) for the main dashboard to display. + """ + + def __init__(self, log_level: str): + setup_logging(log_level, 'DashboardDataFetcher') + + self.vault_address = os.environ.get("MAIN_WALLET_ADDRESS") + if not self.vault_address: + logging.error("MAIN_WALLET_ADDRESS not set in .env file. 
Cannot proceed.") + sys.exit(1) + + self.info = Info(constants.MAINNET_API_URL, skip_ws=True) + self.status_file_path = os.path.join("_logs", "trade_executor_status.json") + self.managed_positions_path = os.path.join("_data", "executor_managed_positions.json") + logging.info(f"Dashboard Data Fetcher initialized for vault: {self.vault_address}") + + def load_managed_positions(self) -> dict: + """Loads the state of which strategy manages which position.""" + if os.path.exists(self.managed_positions_path): + try: + with open(self.managed_positions_path, 'r') as f: + data = json.load(f) + # Create a reverse map: {coin: strategy_name} + return {v['coin']: k for k, v in data.items()} + except (IOError, json.JSONDecodeError): + logging.warning("Could not read managed positions file.") + return {} + + def fetch_and_save_status(self): + """Fetches all account data and saves it to the JSON status file.""" + try: + perpetuals_state = self.info.user_state(self.vault_address) + spot_state = self.info.spot_user_state(self.vault_address) + meta, all_market_contexts = self.info.meta_and_asset_ctxs() + coin_to_strategy_map = self.load_managed_positions() + + status = { + "last_updated_utc": datetime.now().isoformat(), + "perpetuals_account": { "balances": {}, "open_positions": [] }, + "spot_account": { "positions": [] } + } + + # 1. Extract Perpetuals Account Data + margin_summary = perpetuals_state.get("marginSummary", {}) + status["perpetuals_account"]["balances"] = { + "account_value": margin_summary.get("accountValue"), + "total_margin_used": margin_summary.get("totalMarginUsed"), + "withdrawable": margin_summary.get("withdrawable") + } + + asset_positions = perpetuals_state.get("assetPositions", []) + for asset_pos in asset_positions: + pos = asset_pos.get('position', {}) + if float(pos.get('szi', 0)) != 0: + coin = pos.get('coin') + position_value = float(pos.get('positionValue', 0)) + margin_used = float(pos.get('marginUsed', 0)) + leverage = position_value / margin_used if margin_used > 0 else 0 + + position_info = { + "coin": coin, + "strategy": coin_to_strategy_map.get(coin, "Unmanaged"), + "size": pos.get('szi'), + "position_value": pos.get('positionValue'), + "entry_price": pos.get('entryPx'), + "mark_price": pos.get('markPx'), + "pnl": pos.get('unrealizedPnl'), + "liq_price": pos.get('liquidationPx'), + "margin": pos.get('marginUsed'), + "funding": pos.get('fundingRate'), + "leverage": f"{leverage:.1f}x" + } + status["perpetuals_account"]["open_positions"].append(position_info) + + # 2. Extract Spot Account Data + price_map = { asset.get("universe", {}).get("name"): asset.get("markPx") for asset in all_market_contexts if asset.get("universe", {}).get("name") } + spot_balances = spot_state.get("balances", []) + for bal in spot_balances: + total_balance = float(bal.get('total', 0)) + if total_balance > 0: + coin = bal.get('coin') + mark_price = float(price_map.get(coin, 0)) + status["spot_account"]["positions"].append({ + "coin": coin, "balance_size": total_balance, + "position_value": total_balance * mark_price, "pnl": "N/A" + }) + + # 3. 
Write to file + # Use atomic write to prevent partial reads from main_app + temp_file_path = self.status_file_path + ".tmp" + with open(temp_file_path, 'w', encoding='utf-8') as f: + json.dump(status, f, indent=4) + # Rename is atomic + os.replace(temp_file_path, self.status_file_path) + + logging.debug(f"Successfully updated dashboard status file.") + + except Exception as e: + logging.error(f"Failed to fetch or save account status: {e}") + + def run(self): + """Main loop to periodically fetch and save data.""" + while True: + self.fetch_and_save_status() + time.sleep(5) # Update dashboard data every 5 seconds + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Run the Dashboard Data Fetcher.") + parser.add_argument("--log-level", default="normal", choices=['off', 'normal', 'debug']) + args = parser.parse_args() + + fetcher = DashboardDataFetcher(log_level=args.log_level) + try: + fetcher.run() + except KeyboardInterrupt: + logging.info("Dashboard Data Fetcher stopped.") diff --git a/main_app.py b/main_app.py index b425701..5e7cb5d 100644 --- a/main_app.py +++ b/main_app.py @@ -19,11 +19,11 @@ from strategies.base_strategy import BaseStrategy # --- Configuration --- WATCHED_COINS = ["BTC", "ETH", "SOL", "BNB", "HYPE", "ASTER", "ZEC", "PUMP", "SUI"] -# --- FIX: Replaced old data_fetcher with the new live_candle_fetcher --- LIVE_CANDLE_FETCHER_SCRIPT = "live_candle_fetcher.py" RESAMPLER_SCRIPT = "resampler.py" MARKET_CAP_FETCHER_SCRIPT = "market_cap_fetcher.py" TRADE_EXECUTOR_SCRIPT = "trade_executor.py" +DASHBOARD_DATA_FETCHER_SCRIPT = "dashboard_data_fetcher.py" STRATEGY_CONFIG_FILE = os.path.join("_data", "strategies.json") DB_PATH = os.path.join("_data", "market_data.db") MARKET_CAP_SUMMARY_FILE = os.path.join("_data", "market_cap_data.json") @@ -65,6 +65,7 @@ def run_resampler_job(timeframes_to_generate: list): """Defines the job for the resampler, redirecting output to a log file.""" log_file = os.path.join(LOGS_DIR, "resampler.log") try: + # --- MODIFIED: No longer needs to check for empty list, coins are from WATCHED_COINS --- command = [sys.executable, RESAMPLER_SCRIPT, "--coins"] + WATCHED_COINS + ["--timeframes"] + timeframes_to_generate + ["--log-level", "normal"] with open(log_file, 'a') as f: f.write(f"\n--- Starting resampler.py job at {datetime.now()} ---\n") @@ -78,10 +79,15 @@ def run_resampler_job(timeframes_to_generate: list): def resampler_scheduler(timeframes_to_generate: list): """Schedules the resampler.py script.""" setup_logging('off', 'ResamplerScheduler') + + if not timeframes_to_generate: + logging.warning("Resampler scheduler started but no timeframes were provided to generate. The process will idle.") + return # Exit the function if there's nothing to do + run_resampler_job(timeframes_to_generate) # Schedule to run every minute at the :01 second mark schedule.every().minute.at(":01").do(run_resampler_job, timeframes_to_generate=timeframes_to_generate) - logging.info("Resampler scheduled to run every minute at :01.") + logging.info(f"Resampler scheduled to run every minute at :01 for {timeframes_to_generate}.") while True: schedule.run_pending() time.sleep(1) # Check every second to not miss the scheduled time @@ -110,10 +116,32 @@ def market_cap_fetcher_scheduler(): time.sleep(60) -def run_strategy(strategy_name: str, config: dict): +def run_trade_executor(trade_signal_queue): + """ + Target function to run the trade_executor.py script in a resilient loop. + It passes the shared signal queue to the executor. 
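The temp-file-plus-os.replace sequence in fetch_and_save_status above is the standard atomic-write pattern: readers see either the old file or the new one, never a half-written file. Factored into a reusable helper it would look roughly like this (a sketch; the fsync for durability is an addition, the patch itself does not call it):

    import json
    import os

    def atomic_write_json(path: str, payload: dict) -> None:
        """Write JSON to a sibling temp file, then atomically swap it into place."""
        tmp_path = path + ".tmp"
        with open(tmp_path, "w", encoding="utf-8") as f:
            json.dump(payload, f, indent=4)
            f.flush()
            os.fsync(f.fileno())  # push bytes to disk before the rename
        os.replace(tmp_path, path)  # atomic when source and target share a filesystem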
+ """ + log_file = os.path.join(LOGS_DIR, "trade_executor.log") + while True: + try: + with open(log_file, 'a') as f: + f.write(f"\n--- Starting Trade Executor at {datetime.now()} ---\n") + + from trade_executor import TradeExecutor + + executor = TradeExecutor(log_level="normal", trade_signal_queue=trade_signal_queue) + executor.run() # This will block and run forever + + except (subprocess.CalledProcessError, Exception) as e: + with open(log_file, 'a') as f: + f.write(f"\n--- PROCESS ERROR at {datetime.now()} ---\n") + f.write(f"Trade Executor failed: {e}. Restarting...\n") + time.sleep(10) + +def run_strategy(strategy_name: str, config: dict, trade_signal_queue: multiprocessing.Queue): """ This function BECOMES the strategy runner. It is executed as a separate - process by multiprocessing. + process and pushes signals to the shared queue. """ # These imports only happen in the new, lightweight process import importlib @@ -127,21 +155,17 @@ def run_strategy(strategy_name: str, config: dict): # --- Setup logging to file for this specific process --- log_file_path = os.path.join(LOGS_DIR, f"strategy_{strategy_name}.log") try: - # Redirect stdout and stderr of this process to its log file - sys.stdout = open(log_file_path, 'a') + sys.stdout = open(log_file_path, 'a', buffering=1) # 1 = line buffering sys.stderr = sys.stdout except Exception as e: print(f"Failed to open log file for {strategy_name}: {e}") - # Setup logging *within this process* setup_logging('normal', f"Strategy-{strategy_name}") - # --- Main resilient loop (was previously in main_app) --- while True: try: logging.info(f"--- Starting strategy '{strategy_name}' ---") - # 1. Load the strategy class if 'class' not in config: logging.error(f"Strategy config for '{strategy_name}' is missing the 'class' key. Exiting.") return @@ -149,44 +173,37 @@ def run_strategy(strategy_name: str, config: dict): module_path, class_name = config['class'].rsplit('.', 1) module = importlib.import_module(module_path) StrategyClass = getattr(module, class_name) - strategy = StrategyClass(strategy_name, config['parameters']) # Log level is now handled here + + strategy = StrategyClass(strategy_name, config['parameters'], trade_signal_queue) - # 2. Run the strategy's logic loop - logging.info(f"Starting main logic loop for {strategy.coin} on {strategy.timeframe}.") - while True: - df = strategy.load_data() - if df.empty: - logging.warning("No data loaded. 
Waiting 1 minute...")
-                    time.sleep(60)
-                    continue
-                
-                strategy.calculate_signals_and_state(df.copy())
-                strategy._save_status()
-                
-                logging.info(f"Current Signal: {strategy.current_signal}")
-                time.sleep(60) # Simple 1-minute wait
+            if config.get("is_event_driven", False):
+                logging.info("Starting EVENT-DRIVEN logic loop...")
+                strategy.run_event_loop() # This is a blocking call
+            else:
+                logging.info("Starting POLLING logic loop...")
+                strategy.run_polling_loop() # This is the original blocking call
 
         except KeyboardInterrupt:
             logging.info("Strategy process stopping.")
-            return # Exit the outer loop on Ctrl+C
+            return
         except Exception as e:
             logging.error(f"Strategy '{strategy_name}' failed: {e}", exc_info=True)
             logging.info("Restarting strategy in 10 seconds...")
             time.sleep(10)
 
-def run_trade_executor():
-    """Target function to run the trade_executor.py script in a resilient loop."""
-    log_file = os.path.join(LOGS_DIR, "trade_executor.log")
+def run_dashboard_data_fetcher():
+    """Target function to run the dashboard_data_fetcher.py script."""
+    log_file = os.path.join(LOGS_DIR, "dashboard_data_fetcher.log")
     while True:
         try:
             with open(log_file, 'a') as f:
-                f.write(f"\n--- Starting Trade Executor at {datetime.now()} ---\n")
-                subprocess.run([sys.executable, TRADE_EXECUTOR_SCRIPT, "--log-level", "normal"], check=True, stdout=f, stderr=subprocess.STDOUT)
+                f.write(f"\n--- Starting Dashboard Data Fetcher at {datetime.now()} ---\n")
+                subprocess.run([sys.executable, DASHBOARD_DATA_FETCHER_SCRIPT, "--log-level", "normal"], check=True, stdout=f, stderr=subprocess.STDOUT)
         except (subprocess.CalledProcessError, Exception) as e:
             with open(log_file, 'a') as f:
                 f.write(f"\n--- PROCESS ERROR at {datetime.now()} ---\n")
-                f.write(f"Trade Executor failed: {e}. Restarting...\n")
+                f.write(f"Dashboard Data Fetcher failed: {e}. Restarting...\n")
             time.sleep(10)
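run_dashboard_data_fetcher and the other subprocess wrappers in this file all share the same supervisor shape: append a start marker to the log, run the child with output redirected, and restart after a short pause on any failure. Factored out, the pattern is roughly the following (a sketch; supervise is a hypothetical helper, not part of this patch):

    import subprocess
    import sys
    import time
    from datetime import datetime

    def supervise(script: str, log_path: str, backoff_s: int = 10) -> None:
        """Run a script forever, logging its output and restarting it on failure."""
        while True:
            try:
                with open(log_path, 'a') as f:
                    f.write(f"\n--- Starting {script} at {datetime.now()} ---\n")
                    f.flush()
                    subprocess.run([sys.executable, script], check=True,
                                   stdout=f, stderr=subprocess.STDOUT)
            except Exception as e:
                with open(log_path, 'a') as f:
                    f.write(f"\n--- PROCESS ERROR at {datetime.now()} ---\n")
                    f.write(f"{script} failed: {e}. Restarting...\n")
                time.sleep(backoff_s)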

@@ -207,13 +224,15 @@ class MainApp:
         try:
             self.prices = dict(self.shared_prices)
         except Exception as e:
             logging.debug(f"Could not read from shared prices dict: {e}")
 
     def read_market_caps(self):
+        """Reads the latest market cap summary from its JSON file."""
         if os.path.exists(MARKET_CAP_SUMMARY_FILE):
             try:
                 with open(MARKET_CAP_SUMMARY_FILE, 'r', encoding='utf-8') as f:
                     summary_data = json.load(f)
+                
                 for coin in self.watched_coins:
                     table_key = f"{coin}_market_cap"
                     if table_key in summary_data:
@@ -222,6 +241,7 @@
             logging.debug("Could not read market cap summary file.")
 
     def read_strategy_statuses(self):
+        """Reads the status JSON file for each enabled strategy."""
         enabled_statuses = {}
         for name, config in self.strategy_configs.items():
             if config.get("enabled", False):
@@ -237,6 +257,7 @@ class MainApp:
         self.strategy_statuses = enabled_statuses
 
     def read_executor_status(self):
+        """Reads the live status file from the trade executor."""
         if os.path.exists(TRADE_EXECUTOR_STATUS_FILE):
             try:
                 with open(TRADE_EXECUTOR_STATUS_FILE, 'r', encoding='utf-8') as f:
@@ -247,11 +268,13 @@ class MainApp:
                 self.open_positions = {}
 
     def check_process_status(self):
+        """Checks if the background processes are still running."""
         for name, process in self.background_processes.items():
             self.process_status[name] = "Running" if process.is_alive() else "STOPPED"
 
     def display_dashboard(self):
-        print("\x1b[H\x1b[J", end="")
+        """Displays a formatted dashboard with side-by-side tables."""
+        print("\x1b[H\x1b[J", end="") # Clear screen
 
         left_table_lines = ["--- Market Dashboard ---"]
         left_table_width = 44
@@ -278,9 +301,11 @@ class MainApp:
         left_table_lines.append("-" * left_table_width)
 
         right_table_lines = ["--- Strategy Status ---"]
-        right_table_width = 154
+        # --- FIX: Adjusted table width after removing parameters ---
+        right_table_width = 105
         right_table_lines.append("-" * right_table_width)
-        right_table_lines.append(f"{'#':^2} | {'Strategy Name':<25} | {'Coin':^6} | {'Signal':^8} | {'Signal Price':>12} | {'Last Change':>17} | {'TF':^5} | {'Size':^8} | {'Parameters':<45} |")
+        # --- FIX: Removed 'Parameters' from header ---
+        right_table_lines.append(f"{'#':^2} | {'Strategy Name':<25} | {'Coin':^6} | {'Signal':^8} | {'Signal Price':>12} | {'Last Change':>17} | {'TF':^5} | {'Size':^8} |")
         right_table_lines.append("-" * right_table_width)
         for i, (name, status) in enumerate(self.strategy_statuses.items(), 1):
             signal = status.get('current_signal', 'N/A')
@@ -294,13 +319,40 @@
                     last_change_display = dt_local.strftime('%Y-%m-%d %H:%M')
 
             config_params = self.strategy_configs.get(name, {}).get('parameters', {})
-            coin = config_params.get('coin', 'N/A')
-            timeframe = config_params.get('timeframe', 'N/A')
-            size = config_params.get('size', 'N/A')
-            other_params = {k: v for k, v in config.get('parameters', {}).items() if k not in ['coin', 'timeframe', 'size']}
-            params_str = ", ".join([f"{k}={v}" for k, v in other_params.items()])
 
-            right_table_lines.append(f"{i:^2} | {name:<25} | {coin:^6} | {signal:^8} | {price_display:>12} | {last_change_display:>17} | {timeframe:^5} | {size:>8} | {params_str:<45} |")
+
+            # --- NEW ROBUST LOGIC ---
+            # 1. Get Timeframe (always from config)
+            timeframe = config_params.get('timeframe', 'N/A')
+            
+            # 2. Get Coin: Try status file first (live), then config file (static)
+            coin = status.get('coin', config_params.get('coin', 'N/A'))
+            
+            # 3. 
Get Size: Try status file first, then config file + size_from_status = status.get('size', None) + size_from_config = config_params.get('size', None) + + size = "N/A" + if size_from_status is not None: + size = size_from_status # Use live status from copy_trader + elif size_from_config is not None: + size = size_from_config # Use config from simple strategy + elif 'coins_to_copy' in config_params: + # Special case: copy_trader, but status file is old (no 'size' field) + if coin != 'N/A' and coin != 'Multi': + # Try to find size in config if we know the coin from status + # --- SYNTAX FIX: Removed extra ".get(" --- + size = config_params.get('coins_to_copy', {}).get(coin, {}).get('size', 'Multi') + else: + coin = 'Multi' # It's a copy trader, but we don't know the coin + size = 'Multi' + + size_display = f"{size:>8}" if isinstance(size, (int, float)) else f"{str(size):>8}" + # --- END OF NEW LOGIC --- + + # --- FIX: Removed parameter string logic --- + + # --- FIX: Removed 'params_str' from the formatted line --- + right_table_lines.append(f"{i:^2} | {name:<25} | {coin:^6} | {signal:^8} | {price_display:>12} | {last_change_display:>17} | {timeframe:^5} | {size_display} |") right_table_lines.append("-" * right_table_width) output_lines = [] @@ -346,9 +398,7 @@ class MainApp: output_lines.append(f"{'Spot':<10} | {coin:<6} | {balance_size:>15} | {'-':>12} | {'-':>12} | {pnl:>15} | {'-':>10} |") output_lines.append("-" * pos_table_width) - output_lines.append("\n--- Background Processes ---") - for name, status in self.process_status.items(): - output_lines.append(f"{name:<25}: {status}") + # --- REMOVED: Background Processes Section --- final_output = "\n".join(output_lines) print(final_output) @@ -361,7 +411,7 @@ class MainApp: self.read_market_caps() self.read_strategy_statuses() self.read_executor_status() - self.check_process_status() + # --- REMOVED: self.check_process_status() --- self.display_dashboard() time.sleep(0.5) @@ -381,32 +431,34 @@ if __name__ == "__main__": logging.error(f"Could not load strategies from '{STRATEGY_CONFIG_FILE}': {e}") sys.exit(1) - required_timeframes = set() - for name, config in strategy_configs.items(): - if config.get("enabled", False): - tf = config.get("parameters", {}).get("timeframe") - if tf: - required_timeframes.add(tf) - - if not required_timeframes: - logging.warning("No timeframes required by any enabled strategy.") + # --- MODIFIED: Removed dynamic timeframe logic --- + # --- NEW: Hardcoded timeframes for the resampler --- + resampler_timeframes = [ + "3m", "5m", "15m", "30m", "1h", "2h", "4h", "8h", + "12h", "1d", "3d", "1w", "1M", "148m", "37m" + ] + logging.info(f"Using hardcoded timeframes for resampler: {resampler_timeframes}") + # --- END NEW --- with multiprocessing.Manager() as manager: shared_prices = manager.dict() + trade_signal_queue = manager.Queue() processes["Live Market Feed"] = multiprocessing.Process(target=start_live_feed, args=(shared_prices, 'off'), daemon=True) processes["Live Candle Fetcher"] = multiprocessing.Process(target=run_live_candle_fetcher, daemon=True) - processes["Resampler"] = multiprocessing.Process(target=resampler_scheduler, args=(list(required_timeframes),), daemon=True) + # --- MODIFIED: Pass the new hardcoded list to the resampler process --- + processes["Resampler"] = multiprocessing.Process(target=resampler_scheduler, args=(resampler_timeframes,), daemon=True) processes["Market Cap Fetcher"] = multiprocessing.Process(target=market_cap_fetcher_scheduler, daemon=True) - processes["Trade Executor"] = 
multiprocessing.Process(target=run_trade_executor, daemon=True) + + processes["Trade Executor"] = multiprocessing.Process(target=run_trade_executor, args=(trade_signal_queue,), daemon=True) + processes["Dashboard Data"] = multiprocessing.Process(target=run_dashboard_data_fetcher, daemon=True) for name, config in strategy_configs.items(): if config.get("enabled", False): - # --- FIX: Check for the 'class' key, not the 'script' key --- if 'class' not in config: logging.error(f"Strategy '{name}' is missing 'class' key. Skipping.") continue - proc = multiprocessing.Process(target=run_strategy, args=(name, config), daemon=True) + proc = multiprocessing.Process(target=run_strategy, args=(name, config, trade_signal_queue), daemon=True) processes[f"Strategy: {name}"] = proc for name, proc in processes.items(): @@ -424,6 +476,6 @@ if __name__ == "__main__": if proc.is_alive(): proc.terminate() for proc in processes.values(): if proc.is_alive(): proc.join() - logging.info("Shutdown complete.") - sys.exit(0) + logging.info("Shutdown complete.") + sys.exit(0) diff --git a/strategies/base_strategy.py b/strategies/base_strategy.py index ebeb063..afe3ff6 100644 --- a/strategies/base_strategy.py +++ b/strategies/base_strategy.py @@ -5,8 +5,12 @@ import os import logging from datetime import datetime, timezone import sqlite3 +import multiprocessing +import time from logging_utils import setup_logging +from hyperliquid.info import Info +from hyperliquid.utils import constants class BaseStrategy(ABC): """ @@ -14,20 +18,23 @@ class BaseStrategy(ABC): It provides common functionality like loading data, saving status, and state management. """ - def __init__(self, strategy_name: str, params: dict): - # Note: log_level is not needed here as logging is set up by the process + def __init__(self, strategy_name: str, params: dict, trade_signal_queue: multiprocessing.Queue = None, shared_status: dict = None): self.strategy_name = strategy_name self.params = params + self.trade_signal_queue = trade_signal_queue + # Optional multiprocessing.Manager().dict() to hold live status (avoids file IO) + self.shared_status = shared_status + self.coin = params.get("coin", "N/A") self.timeframe = params.get("timeframe", "N/A") self.db_path = os.path.join("_data", "market_data.db") self.status_file_path = os.path.join("_data", f"strategy_status_{self.strategy_name}.json") - + self.current_signal = "INIT" self.last_signal_change_utc = None self.signal_price = None - logging.info(f"Initializing with parameters: {self.params}") + # Note: Logging is set up by the run_strategy function def load_data(self) -> pd.DataFrame: """Loads historical data for the configured coin and timeframe.""" @@ -53,27 +60,41 @@ class BaseStrategy(ABC): """The core logic of the strategy. Must be implemented by child classes.""" pass - def calculate_signals_and_state(self, df: pd.DataFrame): + def calculate_signals_and_state(self, df: pd.DataFrame) -> bool: """ - A wrapper that calls the strategy's signal calculation and then - determines the last signal change from the historical data. + A wrapper that calls the strategy's signal calculation, determines + the last signal change, and returns True if the signal has changed. 
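The manager.Queue() created above is the entire contract between the strategy processes and the executor: strategies put dicts on it, the executor blocks on get(). Reduced to essentials, the wiring behaves like this (illustrative payload, matching the keys the executor reads):

    import multiprocessing

    def producer(q):
        q.put({"strategy_name": "demo", "signal": "BUY", "coin": "BTC",
               "signal_price": 100.0,
               "config": {"agent": "default", "parameters": {"size": 0.01}}})

    def consumer(q):
        signal = q.get()  # blocks until a signal arrives
        print(f"executor received: {signal['signal']} {signal['coin']}")

    if __name__ == "__main__":
        with multiprocessing.Manager() as manager:
            q = manager.Queue()
            p1 = multiprocessing.Process(target=producer, args=(q,))
            p2 = multiprocessing.Process(target=consumer, args=(q,))
            p1.start()
            p2.start()
            p1.join()
            p2.join()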
""" df_with_signals = self.calculate_signals(df) df_with_signals.dropna(inplace=True) - if df_with_signals.empty: return + if df_with_signals.empty: + return False df_with_signals['position_change'] = df_with_signals['signal'].diff() - last_signal = df_with_signals['signal'].iloc[-1] - if last_signal == 1: self.current_signal = "BUY" - elif last_signal == -1: self.current_signal = "SELL" - else: self.current_signal = "HOLD" + last_signal_int = df_with_signals['signal'].iloc[-1] + new_signal_str = "HOLD" + if last_signal_int == 1: new_signal_str = "BUY" + elif last_signal_int == -1: new_signal_str = "SELL" - last_change_series = df_with_signals[df_with_signals['position_change'] != 0] - if not last_change_series.empty: - last_change_row = last_change_series.iloc[-1] - self.last_signal_change_utc = last_change_row.name.tz_localize('UTC').isoformat() - self.signal_price = last_change_row['close'] + signal_changed = False + if self.current_signal == "INIT": + if new_signal_str == "BUY": self.current_signal = "INIT_BUY" + elif new_signal_str == "SELL": self.current_signal = "INIT_SELL" + else: self.current_signal = "HOLD" + signal_changed = True + elif new_signal_str != self.current_signal: + self.current_signal = new_signal_str + signal_changed = True + + if signal_changed: + last_change_series = df_with_signals[df_with_signals['position_change'] != 0] + if not last_change_series.empty: + last_change_row = last_change_series.iloc[-1] + self.last_signal_change_utc = last_change_row.name.tz_localize('UTC').isoformat() + self.signal_price = last_change_row['close'] + + return signal_changed def _save_status(self): """Saves the current strategy state to its JSON file.""" @@ -84,9 +105,62 @@ class BaseStrategy(ABC): "signal_price": self.signal_price, "last_checked_utc": datetime.now(timezone.utc).isoformat() } + # If a shared status dict is provided (Manager.dict()), update it instead of writing files try: - with open(self.status_file_path, 'w', encoding='utf-8') as f: - json.dump(status, f, indent=4) + if self.shared_status is not None: + try: + # store the status under the strategy name for easy lookup + self.shared_status[self.strategy_name] = status + except Exception: + # Manager proxies may not accept nested mutable objects consistently; assign a copy + self.shared_status[self.strategy_name] = dict(status) + else: + with open(self.status_file_path, 'w', encoding='utf-8') as f: + json.dump(status, f, indent=4) except IOError as e: logging.error(f"Failed to write status file for {self.strategy_name}: {e}") + def run_polling_loop(self): + """ + The default execution loop for polling-based strategies (e.g., SMAs). + """ + while True: + df = self.load_data() + if df.empty: + logging.warning("No data loaded. 
Waiting 1 minute...") + time.sleep(60) + continue + + signal_changed = self.calculate_signals_and_state(df.copy()) + self._save_status() + + if signal_changed or self.current_signal == "INIT_BUY" or self.current_signal == "INIT_SELL": + logging.warning(f"New signal detected: {self.current_signal}") + self.trade_signal_queue.put({ + "strategy_name": self.strategy_name, + "signal": self.current_signal, + "coin": self.coin, + "signal_price": self.signal_price, + "config": {"agent": self.params.get("agent"), "parameters": self.params} + }) + if self.current_signal == "INIT_BUY": self.current_signal = "BUY" + if self.current_signal == "INIT_SELL": self.current_signal = "SELL" + + logging.info(f"Current Signal: {self.current_signal}") + time.sleep(60) + + def run_event_loop(self): + """ + A placeholder for event-driven (WebSocket) strategies. + Child classes must override this. + """ + logging.error("run_event_loop() is not implemented for this strategy.") + time.sleep(3600) # Sleep for an hour to prevent rapid error loops + + def on_fill_message(self, message): + """ + Placeholder for the WebSocket callback. + Child classes must override this. + """ + pass + diff --git a/strategies/copy_trader_strategy.py b/strategies/copy_trader_strategy.py new file mode 100644 index 0000000..9f086ea --- /dev/null +++ b/strategies/copy_trader_strategy.py @@ -0,0 +1,178 @@ +import logging +import time +import json +from datetime import datetime, timezone +from hyperliquid.info import Info +from hyperliquid.utils import constants + +from strategies.base_strategy import BaseStrategy + +class CopyTraderStrategy(BaseStrategy): + """ + An event-driven strategy that monitors a target wallet address and + copies its trades for a specific set of allowed coins, using + per-coin size and leverage settings. + """ + def __init__(self, strategy_name: str, params: dict, trade_signal_queue, shared_status: dict = None): + super().__init__(strategy_name, params, trade_signal_queue, shared_status) + + self.target_address = self.params.get("target_address", "").lower() + + self.coins_to_copy = self.params.get("coins_to_copy", {}) + self.allowed_coins = list(self.coins_to_copy.keys()) + + if not self.target_address: + logging.error("No 'target_address' specified in parameters for copy trader.") + raise ValueError("target_address is required") + if not self.allowed_coins: + logging.warning("No 'coins_to_copy' configured. This strategy will not copy any trades.") + + self.info = None # Will be initialized in the run loop + + # --- FIX: Set initial state to "WAIT" --- + self.current_signal = "WAIT" + + # Record the strategy's start time to ignore historical data + self.start_time_utc = datetime.now(timezone.utc) + logging.info(f"Strategy initialized. Ignoring all trades before {self.start_time_utc.isoformat()}") + + def calculate_signals(self, df): + # This strategy is event-driven, so it does not use polling-based signal calculation. + pass + + def on_fill_message(self, message): + """ + This is the callback function that gets triggered by the WebSocket + every time the monitored address has an event. 
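For orientation, the message this callback consumes looks roughly like the following. The shape is inferred from the fields read below (channel, data.user, data.fills[].coin/px/side/time), not copied from Hyperliquid documentation, so treat it as illustrative:

    # Illustrative only; the authoritative schema is whatever the WS actually sends.
    example_message = {
        "channel": "userFills",
        "data": {
            "user": "0x47930c76790c865217472f2ddb4d14c640ee450a",
            "fills": [
                {"coin": "BTC", "px": "67000.5", "side": "B",
                 "time": 1761600000000, "sz": "0.01"},
            ],
        },
    }

    fill = example_message["data"]["fills"][0]
    side = "BUY" if fill["side"] == "B" else "SELL"
    print(side, fill["coin"], float(fill["px"]))  # BUY BTC 67000.5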
+ """ + try: + channel = message.get("channel") + if channel not in ("user", "userFills", "userEvents"): + return + + data = message.get("data") + if not data: + return + + fills = data.get("fills", []) + if not fills: + return + + user_address = data.get("user", "").lower() + + if user_address != self.target_address: + return + + logging.debug(f"Received {len(fills)} fill(s) for user {user_address}") + + for fill in fills: + # Check if the trade is new or historical + trade_time = datetime.fromtimestamp(fill['time'] / 1000, tz=timezone.utc) + if trade_time < self.start_time_utc: + logging.info(f"Ignoring stale/historical trade from {trade_time.isoformat()}") + continue + + coin = fill.get('coin') + + if coin in self.allowed_coins: + side = fill.get('side') + price = float(fill.get('px')) + + signal = "HOLD" + if side == "B": + signal = "BUY" + elif side == "A": + signal = "SELL" + + coin_config = self.coins_to_copy.get(coin) + if not coin_config or not coin_config.get("size"): + logging.warning(f"No trade size specified for {coin}. Ignoring fill.") + continue + + # --- 1. Create the trade-specific config --- + trade_params = self.params.copy() + trade_params.update(coin_config) + trade_config = { + "agent": self.params.get("agent"), + "parameters": trade_params + } + + # --- 2. (PRIORITY) Put the signal on the queue for the executor --- + self.trade_signal_queue.put({ + "strategy_name": self.strategy_name, + "signal": signal, + "coin": coin, + "signal_price": price, + "config": trade_config + }) + + # --- 3. (Secondary) Update internal state and log --- + self.current_signal = signal + self.signal_price = price + self.last_signal_change_utc = trade_time.isoformat() + self._save_status() # Update the dashboard status file + + logging.warning(f"Copy trade signal SENT for {coin}: {signal} @ {price}, Size: {coin_config['size']}") + logging.info(f"Source trade logged: {json.dumps(fill)}") + + else: + logging.info(f"Ignoring fill for unmonitored coin: {coin}") + + except Exception as e: + logging.error(f"Error in on_fill_message: {e}", exc_info=True) + + def _connect_and_subscribe(self): + """ + Establishes a new WebSocket connection and subscribes to the userFills channel. + """ + try: + logging.info("Connecting to Hyperliquid WebSocket...") + self.info = Info(constants.MAINNET_API_URL, skip_ws=False) + subscription = {"type": "userFills", "user": self.target_address} + self.info.subscribe(subscription, self.on_fill_message) + logging.info(f"Subscribed to 'userFills' for target address: {self.target_address}") + return True + except Exception as e: + logging.error(f"Failed to connect or subscribe: {e}") + self.info = None + return False + + def run_event_loop(self): + """ + This method overrides the default polling loop. It establishes a + persistent WebSocket connection and runs a watchdog to ensure + it stays connected. + """ + if not self._connect_and_subscribe(): + # If connection fails on start, wait 60s before letting the process restart + time.sleep(60) + return + + # --- ADDED: Save the initial "WAIT" status --- + self._save_status() + + while True: + try: + time.sleep(15) # Check the connection every 15 seconds + + if self.info is None or not self.info.ws_manager.is_alive(): + logging.error(f"WebSocket connection lost. 
Attempting to reconnect...") + + if self.info and self.info.ws_manager: + try: + self.info.ws_manager.stop() + except Exception as e: + logging.error(f"Error stopping old ws_manager: {e}") + + if not self._connect_and_subscribe(): + logging.error("Reconnect failed, will retry in 15s.") + else: + logging.info("Successfully reconnected to WebSocket.") + # After reconnecting, save the current status again + self._save_status() + else: + logging.debug("Watchdog check: WebSocket connection is active.") + + except Exception as e: + logging.error(f"An error occurred in the watchdog loop: {e}", exc_info=True) + diff --git a/strategies/ma_cross_strategy.py b/strategies/ma_cross_strategy.py index 3effd1e..ff756a2 100644 --- a/strategies/ma_cross_strategy.py +++ b/strategies/ma_cross_strategy.py @@ -7,8 +7,10 @@ class MaCrossStrategy(BaseStrategy): A strategy based on a fast Simple Moving Average (SMA) crossing a slow SMA. """ - def __init__(self, strategy_name: str, params: dict, log_level: str): - super().__init__(strategy_name, params) + # --- FIX: Changed 3rd argument from log_level to trade_signal_queue --- + def __init__(self, strategy_name: str, params: dict, trade_signal_queue): + # --- FIX: Passed trade_signal_queue to the parent class --- + super().__init__(strategy_name, params, trade_signal_queue) self.fast_ma_period = self.params.get('short_ma') or self.params.get('fast') or 0 self.slow_ma_period = self.params.get('long_ma') or self.params.get('slow') or 0 @@ -26,4 +28,3 @@ class MaCrossStrategy(BaseStrategy): df.loc[df['fast_sma'] < df['slow_sma'], 'signal'] = -1 return df - diff --git a/strategies/single_sma_strategy.py b/strategies/single_sma_strategy.py index 4dd2ddb..a05f95e 100644 --- a/strategies/single_sma_strategy.py +++ b/strategies/single_sma_strategy.py @@ -6,8 +6,10 @@ class SingleSmaStrategy(BaseStrategy): """ A strategy based on the price crossing a single Simple Moving Average (SMA). """ - def __init__(self, strategy_name: str, params: dict): - super().__init__(strategy_name, params) + # --- FIX: Added trade_signal_queue to the constructor --- + def __init__(self, strategy_name: str, params: dict, trade_signal_queue): + # --- FIX: Passed trade_signal_queue to the parent class --- + super().__init__(strategy_name, params, trade_signal_queue) self.sma_period = self.params.get('sma_period', 0) def calculate_signals(self, df: pd.DataFrame) -> pd.DataFrame: @@ -23,4 +25,3 @@ class SingleSmaStrategy(BaseStrategy): df.loc[df['close'] < df['sma'], 'signal'] = -1 return df - diff --git a/trade_executor.py b/trade_executor.py index 35ac261..4a1f40c 100644 --- a/trade_executor.py +++ b/trade_executor.py @@ -5,6 +5,7 @@ import sys import json import time from datetime import datetime +import multiprocessing from eth_account import Account from hyperliquid.exchange import Exchange @@ -20,40 +21,41 @@ load_dotenv() class TradeExecutor: """ - Monitors strategy signals and executes trades using a multi-agent, - multi-strategy position management system. Each strategy's position is - tracked independently. + Monitors a shared queue for strategy signals and executes trades. + This script is now a dedicated, event-driven consumer. 
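The watchdog idiom in run_event_loop above (sleep, probe liveness, tear down, rebuild) generalizes beyond this SDK. A compact sketch with caller-supplied callables (hypothetical names, not the hyperliquid API):

    import logging
    import time

    def watchdog(connect, is_alive, stop, interval_s: int = 15) -> None:
        """Keep a connection alive: probe periodically, rebuild it on failure."""
        if not connect():
            time.sleep(60)  # initial connect failed; let the supervisor restart us
            return
        while True:
            time.sleep(interval_s)
            if is_alive():
                continue
            logging.error("Connection lost. Attempting to reconnect...")
            try:
                stop()  # tear down the dead connection before rebuilding
            except Exception as e:
                logging.error(f"Error during teardown: {e}")
            if not connect():
                logging.error(f"Reconnect failed, will retry in {interval_s}s.")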
""" - def __init__(self, log_level: str): + def __init__(self, log_level: str, trade_signal_queue: multiprocessing.Queue, shared_executor_status: dict = None): setup_logging(log_level, 'TradeExecutor') - + + self.trade_signal_queue = trade_signal_queue + + # Optional Manager.dict() to store live managed positions and other executor status + self.shared_executor_status = shared_executor_status + self.vault_address = os.environ.get("MAIN_WALLET_ADDRESS") if not self.vault_address: logging.error("MAIN_WALLET_ADDRESS not set.") - sys.exit(1) + # --- FIX: Raise an exception instead of sys.exit() --- + # This allows the main_app process manager to catch and log the error. + raise ValueError("MAIN_WALLET_ADDRESS not set in environment.") + # --- FIX: Corrected constant name from MAIN_NET_API_URL to MAINNET_API_URL --- self.info = Info(constants.MAINNET_API_URL, skip_ws=True) self.exchanges = self._load_agents() if not self.exchanges: logging.error("No trading agents found in .env file.") - sys.exit(1) + # --- FIX: Raise an exception instead of sys.exit() --- + raise ValueError("No trading agents found in .env file. Check AGENT_PRIVATE_KEY or _AGENT_PK vars.") - strategy_config_path = os.path.join("_data", "strategies.json") - try: - with open(strategy_config_path, 'r') as f: - self.strategy_configs = {name: config for name, config in json.load(f).items() if config.get("enabled")} - logging.info(f"Loaded {len(self.strategy_configs)} enabled strategies.") - except (FileNotFoundError, json.JSONDecodeError) as e: - logging.error(f"Could not load strategies from '{strategy_config_path}': {e}") - sys.exit(1) - - self.status_file_path = os.path.join("_logs", "trade_executor_status.json") self.managed_positions_path = os.path.join("_data", "executor_managed_positions.json") self.managed_positions = self._load_managed_positions() + logging.info(f"TradeExecutor initialized. Agents available: {list(self.exchanges.keys())}") def _load_agents(self) -> dict: - """Discovers and initializes agents from environment variables.""" + """ + Discovers and initializes agents by scanning for environment variables. + """ exchanges = {} logging.info("Discovering agents from environment variables...") for env_var, private_key in os.environ.items(): @@ -74,10 +76,20 @@ class TradeExecutor: def _load_managed_positions(self) -> dict: """Loads the state of which strategy manages which position.""" + # Prefer shared in-memory state when available + try: + if self.shared_executor_status is not None: + mgr = self.shared_executor_status.get('managed_positions') if isinstance(self.shared_executor_status, dict) else None + if mgr: + logging.info("Loading managed positions from shared executor status.") + return dict(mgr) + except Exception: + logging.debug("Unable to read managed positions from shared status. Falling back to file.") + if os.path.exists(self.managed_positions_path): try: with open(self.managed_positions_path, 'r') as f: - logging.info("Loading existing managed positions state.") + logging.info("Loading existing managed positions state from file.") return json.load(f) except (IOError, json.JSONDecodeError): logging.warning("Could not read managed positions file. 
Starting fresh.") @@ -86,115 +98,154 @@ class TradeExecutor: def _save_managed_positions(self): """Saves the current state of managed positions.""" try: - with open(self.managed_positions_path, 'w') as f: - json.dump(self.managed_positions, f, indent=4) + if self.shared_executor_status is not None: + try: + # store under a known key + self.shared_executor_status['managed_positions'] = dict(self.managed_positions) + except Exception: + # fallback: try direct assignment + self.shared_executor_status['managed_positions'] = self.managed_positions + else: + with open(self.managed_positions_path, 'w') as f: + json.dump(self.managed_positions, f, indent=4) except IOError as e: logging.error(f"Failed to save managed positions state: {e}") - def _save_executor_status(self, perpetuals_state, spot_state, all_market_contexts): - """Saves the current balances and open positions to a live status file.""" - # This function is correct and does not need changes. - pass - def run(self): - """The main execution loop with advanced position management.""" - logging.info("Starting Trade Executor loop...") + """ + Main execution loop. Blocks and waits for a signal from the queue. + """ + logging.info("Trade Executor started. Waiting for signals...") while True: try: - perpetuals_state = self.info.user_state(self.vault_address) - open_positions_api = {pos['position'].get('coin'): pos['position'] for pos in perpetuals_state.get('assetPositions', []) if float(pos.get('position', {}).get('szi', 0)) != 0} - - for name, config in self.strategy_configs.items(): - coin = config['parameters'].get('coin') - size = config['parameters'].get('size') - # --- ADDED: Load leverage parameters from config --- - leverage_long = config['parameters'].get('leverage_long') - leverage_short = config['parameters'].get('leverage_short') - - status_file = os.path.join("_data", f"strategy_status_{name}.json") - if not os.path.exists(status_file): continue - with open(status_file, 'r') as f: status = json.load(f) - - desired_signal = status.get('current_signal') - current_position = self.managed_positions.get(name) - - agent_name = config.get("agent", "default").lower() - exchange_to_use = self.exchanges.get(agent_name) - if not exchange_to_use: - logging.error(f"[{name}] Agent '{agent_name}' not found. Skipping trade.") + trade_signal = self.trade_signal_queue.get() + if not trade_signal: + continue + + logging.info(f"Received signal: {trade_signal}") + + # Basic validation and debug information to help trace gaps + if 'config' not in trade_signal: + logging.error(f"Signal missing 'config' key. Ignoring: {trade_signal}") + continue + if 'strategy_name' not in trade_signal: + logging.error(f"Signal missing 'strategy_name' key. Ignoring: {trade_signal}") + continue + # Special command handling + if isinstance(trade_signal, dict) and trade_signal.get('_cmd') == 'CLOSE_ALL': + target_agent = trade_signal.get('agent') + logging.warning(f"Received CLOSE_ALL command for agent: {target_agent}") + if not target_agent: + logging.error("CLOSE_ALL command missing 'agent' field. Ignoring.") continue - # --- State Machine Logic with Configurable Leverage --- - if desired_signal == "BUY": - if not current_position: - if not all([size, leverage_long]): - logging.error(f"[{name}] 'size' or 'leverage_long' not defined. 
Skipping.") - continue - - logging.warning(f"[{name}] ACTION: Open LONG for {coin} with {leverage_long}x leverage.") - exchange_to_use.update_leverage(int(leverage_long), coin) - exchange_to_use.market_open(coin, True, size, None, 0.01) - self.managed_positions[name] = {"coin": coin, "side": "long", "size": size} - log_trade(strategy=name, coin=coin, action="OPEN_LONG", price=status.get('signal_price', 0), size=size, signal=desired_signal) + # Iterate managed positions and close those opened by the target agent + to_close = [s for s, v in self.managed_positions.items() if v.get('agent') == target_agent] + if not to_close: + logging.info(f"No managed positions found for agent '{target_agent}'.") + continue - elif current_position['side'] == 'short': - if not all([size, leverage_long]): - logging.error(f"[{name}] 'size' or 'leverage_long' not defined. Skipping.") - continue + for sname in to_close: + pos = self.managed_positions.get(sname) + if not pos: + continue + coin = pos.get('coin') + side = pos.get('side') + size = pos.get('size') + # Determine is_buy to neutralize the position + is_buy = True if side == 'short' else False + logging.warning(f"[CLOSE_ALL] Closing {side} position for strategy {sname}, coin {coin}, size {size}") + try: + # Use the agent's exchange if available + exch = self.exchanges.get(target_agent) + if exch: + exch.market_open(coin, is_buy, size, None, 0.01) + else: + logging.error(f"Exchange object for agent '{target_agent}' not found. Skipping live close for {sname}.") + except Exception as e: + logging.error(f"Error closing position for {sname}: {e}") + # remove from managed positions regardless to avoid stuck state + try: + del self.managed_positions[sname] + except KeyError: + pass - logging.warning(f"[{name}] ACTION: Close SHORT and open LONG for {coin} with {leverage_long}x leverage.") - exchange_to_use.update_leverage(int(leverage_long), coin) - exchange_to_use.market_open(coin, True, current_position['size'] + size, None, 0.01) - self.managed_positions[name] = {"coin": coin, "side": "long", "size": size} - log_trade(strategy=name, coin=coin, action="CLOSE_SHORT_&_REVERSE", price=status.get('signal_price', 0), size=size, signal=desired_signal) - - elif desired_signal == "SELL": - if not current_position: - if not all([size, leverage_short]): - logging.error(f"[{name}] 'size' or 'leverage_short' not defined. Skipping.") - continue - - logging.warning(f"[{name}] ACTION: Open SHORT for {coin} with {leverage_short}x leverage.") - exchange_to_use.update_leverage(int(leverage_short), coin) - exchange_to_use.market_open(coin, False, size, None, 0.01) - self.managed_positions[name] = {"coin": coin, "side": "short", "size": size} - log_trade(strategy=name, coin=coin, action="OPEN_SHORT", price=status.get('signal_price', 0), size=size, signal=desired_signal) - - elif current_position['side'] == 'long': - if not all([size, leverage_short]): - logging.error(f"[{name}] 'size' or 'leverage_short' not defined. 
Skipping.") - continue - - logging.warning(f"[{name}] ACTION: Close LONG and open SHORT for {coin} with {leverage_short}x leverage.") - exchange_to_use.update_leverage(int(leverage_short), coin) - exchange_to_use.market_open(coin, False, current_position['size'] + size, None, 0.01) - self.managed_positions[name] = {"coin": coin, "side": "short", "size": size} - log_trade(strategy=name, coin=coin, action="CLOSE_LONG_&_REVERSE", price=status.get('signal_price', 0), size=size, signal=desired_signal) - - elif desired_signal == "FLAT": - if current_position: - logging.warning(f"[{name}] ACTION: Close {current_position['side']} position for {coin}.") - is_buy = current_position['side'] == 'short' - exchange_to_use.market_open(coin, is_buy, current_position['size'], None, 0.01) - del self.managed_positions[name] - log_trade(strategy=name, coin=coin, action=f"CLOSE_{current_position['side'].upper()}", price=status.get('signal_price', 0), size=current_position['size'], signal=desired_signal) - self._save_managed_positions() + logging.info(f"CLOSE_ALL for agent '{target_agent}' completed.") + continue + + name = trade_signal['strategy_name'] + config = trade_signal['config'] + params = config.get('parameters', {}) + coin = trade_signal['coin'] + desired_signal = trade_signal['signal'] + status = trade_signal + + size = params.get('size') + if size is None: + logging.error(f"[{name}] No 'size' in parameters: {params}. Skipping.") + continue + leverage_long = int(params.get('leverage_long', 2)) + leverage_short = int(params.get('leverage_short', 2)) + current_position = self.managed_positions.get(name) + + agent_name = (config.get("agent") or "default").lower() + exchange_to_use = self.exchanges.get(agent_name) + if not exchange_to_use: + logging.error(f"[{name}] Agent '{agent_name}' not found. Available agents: {list(self.exchanges.keys())}. Skipping trade.") + continue + + # --- State Machine Logic (now runs instantly on signal) --- + if desired_signal == "BUY" or desired_signal == "INIT_BUY": + if not current_position: + logging.warning(f"[{name}] ACTION: Setting leverage to {leverage_long}x and opening LONG for {coin}.") + exchange_to_use.update_leverage(leverage_long, coin) + exchange_to_use.market_open(coin, True, size, None, 0.01) + self.managed_positions[name] = {"coin": coin, "side": "long", "size": size} + log_trade(strategy=name, coin=coin, action="OPEN_LONG", price=status.get('signal_price', 0), size=size, signal=desired_signal) + elif current_position['side'] == 'short': + logging.warning(f"[{name}] ACTION: Closing SHORT and opening LONG for {coin} with {leverage_long}x leverage.") + exchange_to_use.update_leverage(leverage_long, coin) + # 1. Close the short by buying back (this is a market_open, but is_buy=True) + exchange_to_use.market_open(coin, True, current_position['size'], None, 0.01) + log_trade(strategy=name, coin=coin, action="CLOSE_SHORT", price=status.get('signal_price', 0), size=current_position['size'], signal=desired_signal) + # 2. 
Open the new long + exchange_to_use.market_open(coin, True, size, None, 0.01) + self.managed_positions[name] = {"coin": coin, "side": "long", "size": size} + log_trade(strategy=name, coin=coin, action="OPEN_LONG", price=status.get('signal_price', 0), size=size, signal=desired_signal) + + elif desired_signal == "SELL" or desired_signal == "INIT_SELL": + if not current_position: + logging.warning(f"[{name}] ACTION: Setting leverage to {leverage_short}x and opening SHORT for {coin}.") + exchange_to_use.update_leverage(leverage_short, coin) + exchange_to_use.market_open(coin, False, size, None, 0.01) + self.managed_positions[name] = {"coin": coin, "side": "short", "size": size} + log_trade(strategy=name, coin=coin, action="OPEN_SHORT", price=status.get('signal_price', 0), size=size, signal=desired_signal) + elif current_position['side'] == 'long': + logging.warning(f"[{name}] ACTION: Closing LONG and opening SHORT for {coin} with {leverage_short}x leverage.") + exchange_to_use.update_leverage(leverage_short, coin) + # 1. Close the long by selling + exchange_to_use.market_open(coin, False, current_position['size'], None, 0.01) + log_trade(strategy=name, coin=coin, action="CLOSE_LONG", price=status.get('signal_price', 0), size=current_position['size'], signal=desired_signal) + # 2. Open the new short + exchange_to_use.market_open(coin, False, size, None, 0.01) + self.managed_positions[name] = {"coin": coin, "side": "short", "size": size} + # --- FIX: Corrected typo from 'signal.desired_signal' to 'signal=desired_signal' --- + log_trade(strategy=name, coin=coin, action="OPEN_SHORT", price=status.get('signal_price', 0), size=size, signal=desired_signal) + + elif desired_signal == "FLAT": + if current_position: + logging.warning(f"[{name}] ACTION: Close {current_position['side']} position for {coin}.") + is_buy = current_position['side'] == 'short' + exchange_to_use.market_open(coin, is_buy, current_position['size'], None, 0.01) + del self.managed_positions[name] + log_trade(strategy=name, coin=coin, action=f"CLOSE_{current_position['side'].upper()}", price=status.get('signal_price', 0), size=current_position['size'], signal=desired_signal) + + self._save_managed_positions() except Exception as e: - logging.error(f"An error occurred in the main executor loop: {e}") + logging.error(f"An error occurred in the main executor loop: {e}", exc_info=True) + time.sleep(1) - time.sleep(15) - - -if __name__ == "__main__": - parser = argparse.ArgumentParser(description="Run the Trade Executor.") - parser.add_argument("--log-level", default="normal", choices=['off', 'normal', 'debug']) - args = parser.parse_args() - - executor = TradeExecutor(log_level=args.log_level) - try: - executor.run() - except KeyboardInterrupt: - logging.info("Trade Executor stopped.") +# This script is no longer run directly, but is called by main_app.py diff --git a/whale_tracker.py b/whale_tracker.py new file mode 100644 index 0000000..69fc29a --- /dev/null +++ b/whale_tracker.py @@ -0,0 +1,367 @@ +import json +import os +import time +import requests +import logging +import argparse +import sys +from datetime import datetime, timedelta + +# --- Configuration --- +# !! IMPORTANT: Update this to your actual Hyperliquid API endpoint !! 
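One note on the polling client defined below: it makes a single attempt per request and returns None on failure. If rate limits or transient outages become a problem, a thin exponential-backoff wrapper is a natural extension (a sketch, not part of the patch):

    import time
    import requests

    def post_with_backoff(session, url, payload, retries=3, base_delay=1.0):
        """POST with simple exponential backoff on transient failures."""
        for attempt in range(retries):
            try:
                resp = session.post(url, json=payload, timeout=10)
                resp.raise_for_status()
                return resp.json()
            except requests.RequestException:
                if attempt == retries - 1:
                    raise
                time.sleep(base_delay * (2 ** attempt))  # 1s, 2s, 4s, ...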
+API_ENDPOINT = "https://api.hyperliquid.xyz/info" + +INPUT_FILE = os.path.join("_data", "wallets_to_track.json") +OUTPUT_FILE = os.path.join("_data", "wallets_info.json") +LOGS_DIR = "_logs" +LOG_FILE = os.path.join(LOGS_DIR, "whale_tracker.log") + +# Polling intervals (in seconds) +POLL_INTERVALS = { + 'core_data': 10, # 5-15s range + 'open_orders': 20, # 15-30s range + 'account_metrics': 180, # 1-5m range + 'ledger_updates': 600, # 5-15m range + 'save_data': 5, # How often to write to wallets_info.json + 'reload_wallets': 60 # Check for wallet list changes every 60s +} + +class HyperliquidAPI: + """ + Client to handle POST requests to the Hyperliquid info endpoint. + """ + def __init__(self, base_url): + self.base_url = base_url + self.session = requests.Session() + logging.info(f"API Client initialized for endpoint: {base_url}") + + def post_request(self, payload): + """ + Internal helper to send POST requests and handle errors. + """ + try: + response = self.session.post(self.base_url, json=payload, timeout=10) + response.raise_for_status() # Raise an exception for bad status codes (4xx or 5xx) + return response.json() + except requests.exceptions.HTTPError as e: + logging.error(f"HTTP Error: {e.response.status_code} for {e.request.url}. Response: {e.response.text}") + except requests.exceptions.ConnectionError as e: + logging.error(f"Connection Error: {e}") + except requests.exceptions.Timeout: + logging.error(f"Request timed out for payload: {payload.get('type')}") + except json.JSONDecodeError: + logging.error(f"Failed to decode JSON response. Response text: {response.text if 'response' in locals() else 'No response text'}") + except Exception as e: + logging.error(f"An unexpected error occurred in post_request: {e}", exc_info=True) + return None + + def get_user_state(self, user_address: str): + payload = {"type": "clearinghouseState", "user": user_address} + return self.post_request(payload) + + def get_open_orders(self, user_address: str): + payload = {"type": "openOrders", "user": user_address} + return self.post_request(payload) + + def get_user_rate_limit(self, user_address: str): + payload = {"type": "userRateLimit", "user": user_address} + return self.post_request(payload) + + def get_user_ledger_updates(self, user_address: str, start_time_ms: int, end_time_ms: int): + payload = { + "type": "userNonFundingLedgerUpdates", + "user": user_address, + "startTime": start_time_ms, + "endTime": end_time_ms + } + return self.post_request(payload) + +class WalletTracker: + """ + Main class to track wallets, process data, and store results. + """ + def __init__(self, api_client, wallets_to_track): + self.api = api_client + self.wallets = wallets_to_track # This is the list of dicts + self.wallets_by_name = {w['name']: w for w in self.wallets} + self.wallets_data = { + wallet['name']: {"address": wallet['address']} for wallet in self.wallets + } + logging.info(f"WalletTracker initialized for {len(self.wallets)} wallets.") + + def reload_wallets(self): + """ + Checks the INPUT_FILE for changes and updates the tracked wallet list. 
+ """ + logging.debug("Reloading wallet list...") + try: + with open(INPUT_FILE, 'r') as f: + new_wallets_list = json.load(f) + if not isinstance(new_wallets_list, list): + logging.warning(f"Failed to reload '{INPUT_FILE}': content is not a list.") + return + + new_wallets_by_name = {w['name']: w for w in new_wallets_list} + old_names = set(self.wallets_by_name.keys()) + new_names = set(new_wallets_by_name.keys()) + + added_names = new_names - old_names + removed_names = old_names - new_names + + if not added_names and not removed_names: + logging.debug("Wallet list is unchanged.") + return # No changes + + # Update internal wallet list + self.wallets = new_wallets_list + self.wallets_by_name = new_wallets_by_name + + # Add new wallets to wallets_data + for name in added_names: + self.wallets_data[name] = {"address": self.wallets_by_name[name]['address']} + logging.info(f"Added new wallet to track: {name}") + + # Remove old wallets from wallets_data + for name in removed_names: + if name in self.wallets_data: + del self.wallets_data[name] + logging.info(f"Removed wallet from tracking: {name}") + + logging.info(f"Wallet list reloaded. Tracking {len(self.wallets)} wallets.") + + except (FileNotFoundError, json.JSONDecodeError, ValueError) as e: + logging.error(f"Failed to reload and parse '{INPUT_FILE}': {e}") + except Exception as e: + logging.error(f"Unexpected error during wallet reload: {e}", exc_info=True) + + + def calculate_core_metrics(self, state_data: dict) -> dict: + """ + Performs calculations based on user_state data. + """ + if not state_data or 'crossMarginSummary' not in state_data: + logging.warning("Core state data is missing 'crossMarginSummary'.") + return {"raw_state": state_data} + + summary = state_data['crossMarginSummary'] + account_value = float(summary.get('accountValue', 0)) + margin_used = float(summary.get('totalMarginUsed', 0)) + + # Calculations + margin_utilization = (margin_used / account_value) if account_value > 0 else 0 + available_margin = account_value - margin_used + + total_position_value = 0 + if 'assetPositions' in state_data: + for pos in state_data.get('assetPositions', []): + try: + # Use 'value' for position value + pos_value_str = pos.get('position', {}).get('value', '0') + total_position_value += float(pos_value_str) + except (ValueError, TypeError): + logging.warning(f"Could not parse position value: {pos.get('position', {}).get('value')}") + continue + + portfolio_leverage = (total_position_value / account_value) if account_value > 0 else 0 + + # Return calculated metrics alongside raw data + return { + "raw_state": state_data, + "account_value": account_value, + "margin_used": margin_used, + "margin_utilization": margin_utilization, + "available_margin": available_margin, + "total_position_value": total_position_value, + "portfolio_leverage": portfolio_leverage + } + + def poll_core_data(self): + logging.debug("Polling Core Data...") + # Use self.wallets which is updated by reload_wallets + for wallet in self.wallets: + name = wallet['name'] + address = wallet['address'] + state_data = self.api.get_user_state(address) + if state_data: + calculated_data = self.calculate_core_metrics(state_data) + # Ensure wallet hasn't been removed by a concurrent reload + if name in self.wallets_data: + self.wallets_data[name]['core_state'] = calculated_data + time.sleep(0.1) # Avoid bursting requests + + def poll_open_orders(self): + logging.debug("Polling Open Orders...") + for wallet in self.wallets: + name = wallet['name'] + address = wallet['address'] 
+ orders_data = self.api.get_open_orders(address) + if orders_data: + # TODO: Add calculations for 'pending_margin_required' if logic is available + if name in self.wallets_data: + self.wallets_data[name]['open_orders'] = {"raw_orders": orders_data} + time.sleep(0.1) + + def poll_account_metrics(self): + logging.debug("Polling Account Metrics...") + for wallet in self.wallets: + name = wallet['name'] + address = wallet['address'] + metrics_data = self.api.get_user_rate_limit(address) + if metrics_data: + if name in self.wallets_data: + self.wallets_data[name]['account_metrics'] = metrics_data + time.sleep(0.1) + + def poll_ledger_updates(self): + logging.debug("Polling Ledger Updates...") + end_time_ms = int(datetime.now().timestamp() * 1000) + start_time_ms = int((datetime.now() - timedelta(minutes=15)).timestamp() * 1000) + + for wallet in self.wallets: + name = wallet['name'] + address = wallet['address'] + ledger_data = self.api.get_user_ledger_updates(address, start_time_ms, end_time_ms) + if ledger_data: + if name in self.wallets_data: + self.wallets_data[name]['ledger_updates'] = ledger_data + time.sleep(0.1) + + def save_data_to_json(self): + """ + Atomically writes the current wallet data to the output JSON file. + (No longer needs cleaning logic) + """ + logging.debug(f"Saving data to {OUTPUT_FILE}...") + + temp_file = OUTPUT_FILE + ".tmp" + try: + # Save the data + with open(temp_file, 'w', encoding='utf-8') as f: + # self.wallets_data is automatically kept clean by reload_wallets + json.dump(self.wallets_data, f, indent=2) + # Atomic rename (move) + os.replace(temp_file, OUTPUT_FILE) + except (IOError, json.JSONDecodeError) as e: + logging.error(f"Failed to write wallet data to file: {e}") + except Exception as e: + logging.error(f"An unexpected error occurred during file save: {e}") + if os.path.exists(temp_file): + os.remove(temp_file) + +class WhaleTrackerRunner: + """ + Manages the polling loop using last-run timestamps instead of a complex scheduler. + """ + def __init__(self, api_client, wallets, shared_whale_data_dict=None): # Kept arg for compatibility + self.tracker = WalletTracker(api_client, wallets) + self.last_poll_times = {key: 0 for key in POLL_INTERVALS} + self.poll_intervals = POLL_INTERVALS + logging.info("WhaleTrackerRunner initialized to save to JSON file.") + + def update_shared_data(self): + """ + This function is no longer called by the run loop. + It's kept here to prevent errors if imported elsewhere, but is now unused. 
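# A note on save_data_to_json() above: the temp-file-plus-os.replace() sequence
# is what makes the write atomic. os.replace() renames in a single filesystem
# operation (for paths on the same volume), so a reader of wallets_info.json
# sees either the old complete file or the new complete file, never a
# half-written one. The same pattern in isolation (the path is hypothetical):
#
#     tmp = path + ".tmp"
#     with open(tmp, "w", encoding="utf-8") as f:
#         json.dump(data, f, indent=2)
#     os.replace(tmp, path)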
+ """ + logging.debug("No shared dict, saving data to JSON file.") + self.tracker.save_data_to_json() + + + def run(self): + logging.info("Starting main polling loop...") + while True: + try: + now = time.time() + + if now - self.last_poll_times['reload_wallets'] > self.poll_intervals['reload_wallets']: + self.tracker.reload_wallets() + self.last_poll_times['reload_wallets'] = now + + if now - self.last_poll_times['core_data'] > self.poll_intervals['core_data']: + self.tracker.poll_core_data() + self.last_poll_times['core_data'] = now + + if now - self.last_poll_times['open_orders'] > self.poll_intervals['open_orders']: + self.tracker.poll_open_orders() + self.last_poll_times['open_orders'] = now + + if now - self.last_poll_times['account_metrics'] > self.poll_intervals['account_metrics']: + self.tracker.poll_account_metrics() + self.last_poll_times['account_metrics'] = now + + if now - self.last_poll_times['ledger_updates'] > self.poll_intervals['ledger_updates']: + self.tracker.poll_ledger_updates() + self.last_poll_times['ledger_updates'] = now + + if now - self.last_poll_times['save_data'] > self.poll_intervals['save_data']: + self.tracker.save_data_to_json() # <-- NEW + self.last_poll_times['save_data'] = now + + # Sleep for a short duration to prevent busy-waiting + time.sleep(1) + + except Exception as e: + logging.critical(f"Unhandled exception in main loop: {e}", exc_info=True) + time.sleep(10) + +def setup_logging(log_level_str: str, process_name: str): + """Configures logging for the script.""" + if not os.path.exists(LOGS_DIR): + try: + os.makedirs(LOGS_DIR) + except OSError as e: + print(f"Failed to create logs directory {LOGS_DIR}: {e}") + return + + level_map = { + 'debug': logging.DEBUG, + 'normal': logging.INFO, + 'off': logging.NOTSET + } + log_level = level_map.get(log_level_str.lower(), logging.INFO) + + if log_level == logging.NOTSET: + return + + handlers_list = [logging.FileHandler(LOG_FILE, mode='a')] + + if sys.stdout.isatty(): + handlers_list.append(logging.StreamHandler(sys.stdout)) + + logging.basicConfig( + level=log_level, + format=f"%(asctime)s.%(msecs)03d | {process_name:<20} | %(levelname)-8s | %(message)s", + datefmt='%Y-%m-%d %H:%M:%S', + handlers=handlers_list + ) + +if __name__ == "__main__": + parser = argparse.ArgumentParser(description="Hyperliquid Whale Tracker") + parser.add_argument("--log-level", default="normal", choices=['off', 'normal', 'debug']) + args = parser.parse_args() + + setup_logging(args.log_level, "WhaleTracker") + + # Load wallets to track + wallets_to_track = [] + try: + with open(INPUT_FILE, 'r') as f: + wallets_to_track = json.load(f) + if not isinstance(wallets_to_track, list) or not wallets_to_track: + raise ValueError(f"'{INPUT_FILE}' is empty or not a list.") + except (FileNotFoundError, json.JSONDecodeError, ValueError) as e: + logging.critical(f"Failed to load '{INPUT_FILE}': {e}. 
Exiting.") + sys.exit(1) + + # Initialize API client + api_client = HyperliquidAPI(base_url=API_ENDPOINT) + + # Initialize and run the tracker + runner = WhaleTrackerRunner(api_client, wallets_to_track, shared_whale_data_dict=None) + + try: + runner.run() + except KeyboardInterrupt: + logging.info("Whale Tracker shutting down.") + sys.exit(0) + From d650bb5fe2ac6701f3f83e37f5a7049a9af2787d Mon Sep 17 00:00:00 2001 From: DiTus Date: Sun, 2 Nov 2025 19:56:40 +0100 Subject: [PATCH 11/18] updated fast orders --- app.py | 0 base_strategy.py | 165 ++++++++++++ main_app.py | 392 +++++++++++++++++++++-------- position_manager.py | 168 +++++++++++++ strategies/copy_trader_strategy.py | 271 ++++++++++++++------ trade_executor.py | 290 +++++++++------------ 6 files changed, 932 insertions(+), 354 deletions(-) delete mode 100644 app.py create mode 100644 base_strategy.py create mode 100644 position_manager.py diff --git a/app.py b/app.py deleted file mode 100644 index e69de29..0000000 diff --git a/base_strategy.py b/base_strategy.py new file mode 100644 index 0000000..5f0a001 --- /dev/null +++ b/base_strategy.py @@ -0,0 +1,165 @@ +from abc import ABC, abstractmethod +import pandas as pd +import json +import os +import logging +from datetime import datetime, timezone +import sqlite3 +import multiprocessing +import time + +from logging_utils import setup_logging +from hyperliquid.info import Info +from hyperliquid.utils import constants + +class BaseStrategy(ABC): + """ + An abstract base class that defines the blueprint for all trading strategies. + It provides common functionality like loading data, saving status, and state management. + """ + + def __init__(self, strategy_name: str, params: dict, trade_signal_queue: multiprocessing.Queue = None, shared_status: dict = None): + self.strategy_name = strategy_name + self.params = params + self.trade_signal_queue = trade_signal_queue + # Optional multiprocessing.Manager().dict() to hold live status (avoids file IO) + self.shared_status = shared_status + + self.coin = params.get("coin", "N/A") + self.timeframe = params.get("timeframe", "N/A") + self.db_path = os.path.join("_data", "market_data.db") + self.status_file_path = os.path.join("_data", f"strategy_status_{self.strategy_name}.json") + + self.current_signal = "INIT" + self.last_signal_change_utc = None + self.signal_price = None + + # Note: Logging is set up by the run_strategy function + + def load_data(self) -> pd.DataFrame: + """Loads historical data for the configured coin and timeframe.""" + table_name = f"{self.coin}_{self.timeframe}" + + periods = [v for k, v in self.params.items() if 'period' in k or '_ma' in k or 'slow' in k or 'fast' in k] + limit = max(periods) + 50 if periods else 500 + + try: + with sqlite3.connect(f"file:{self.db_path}?mode=ro", uri=True) as conn: + query = f'SELECT * FROM "{table_name}" ORDER BY datetime_utc DESC LIMIT {limit}' + df = pd.read_sql(query, conn, parse_dates=['datetime_utc']) + if df.empty: return pd.DataFrame() + df.set_index('datetime_utc', inplace=True) + df.sort_index(inplace=True) + return df + except Exception as e: + logging.error(f"Failed to load data from table '{table_name}': {e}") + return pd.DataFrame() + + @abstractmethod + def calculate_signals(self, df: pd.DataFrame) -> pd.DataFrame: + """The core logic of the strategy. 
Must be implemented by child classes.""" + pass + + def calculate_signals_and_state(self, df: pd.DataFrame) -> bool: + """ + A wrapper that calls the strategy's signal calculation, determines + the last signal change, and returns True if the signal has changed. + """ + df_with_signals = self.calculate_signals(df) + df_with_signals.dropna(inplace=True) + if df_with_signals.empty: + return False + + df_with_signals['position_change'] = df_with_signals['signal'].diff() + + last_signal_int = df_with_signals['signal'].iloc[-1] + new_signal_str = "HOLD" + if last_signal_int == 1: new_signal_str = "BUY" + elif last_signal_int == -1: new_signal_str = "SELL" + + signal_changed = False + if self.current_signal == "INIT": + if new_signal_str == "BUY": self.current_signal = "INIT_BUY" + elif new_signal_str == "SELL": self.current_signal = "INIT_SELL" + else: self.current_signal = "HOLD" + signal_changed = True + elif new_signal_str != self.current_signal: + self.current_signal = new_signal_str + signal_changed = True + + if signal_changed: + last_change_series = df_with_signals[df_with_signals['position_change'] != 0] + if not last_change_series.empty: + last_change_row = last_change_series.iloc[-1] + self.last_signal_change_utc = last_change_row.name.tz_localize('UTC').isoformat() + self.signal_price = last_change_row['close'] + + return signal_changed + + def _save_status(self): + """Saves the current strategy state to its JSON file.""" + status = { + "strategy_name": self.strategy_name, + "current_signal": self.current_signal, + "last_signal_change_utc": self.last_signal_change_utc, + "signal_price": self.signal_price, + "last_checked_utc": datetime.now(timezone.utc).isoformat() + } + # If a shared status dict is provided (Manager.dict()), update it instead of writing files + try: + if self.shared_status is not None: + try: + # store the status under the strategy name for easy lookup + self.shared_status[self.strategy_name] = status + except Exception: + # Manager proxies may not accept nested mutable objects consistently; assign a copy + self.shared_status[self.strategy_name] = dict(status) + else: + with open(self.status_file_path, 'w', encoding='utf-8') as f: + json.dump(status, f, indent=4) + except IOError as e: + logging.error(f"Failed to write status file for {self.strategy_name}: {e}") + + def run_polling_loop(self): + """ + The default execution loop for polling-based strategies (e.g., SMAs). + """ + while True: + df = self.load_data() + if df.empty: + logging.warning("No data loaded. Waiting 1 minute...") + time.sleep(60) + continue + + signal_changed = self.calculate_signals_and_state(df.copy()) + self._save_status() + + if signal_changed or self.current_signal == "INIT_BUY" or self.current_signal == "INIT_SELL": + logging.warning(f"New signal detected: {self.current_signal}") + self.trade_signal_queue.put({ + "strategy_name": self.strategy_name, + "signal": self.current_signal, + "coin": self.coin, + "signal_price": self.signal_price, + "config": {"agent": self.params.get("agent"), "parameters": self.params} + }) + if self.current_signal == "INIT_BUY": self.current_signal = "BUY" + if self.current_signal == "INIT_SELL": self.current_signal = "SELL" + + logging.info(f"Current Signal: {self.current_signal}") + time.sleep(60) + + def run_event_loop(self): + """ + A placeholder for event-driven (WebSocket) strategies. + Child classes must override this. 
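# To illustrate the flip detection in calculate_signals_and_state() above,
# consider a toy signal series (values invented for the example):
#
#   signal:          [ 1,  1, -1, -1,  1]
#   signal.diff():   [NaN,  0, -2,  0,  2]
#
# Non-zero entries in 'position_change' mark the bars where the signal flipped;
# the most recent one supplies last_signal_change_utc, and its close price
# becomes signal_price.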
+ """ + logging.error("run_event_loop() is not implemented for this strategy.") + time.sleep(3600) # Sleep for an hour to prevent rapid error loops + + def on_fill_message(self, message): + """ + Placeholder for the WebSocket callback. + Child classes must override this. + """ + pass diff --git a/main_app.py b/main_app.py index 5e7cb5d..f64f7ce 100644 --- a/main_app.py +++ b/main_app.py @@ -10,6 +10,8 @@ import sqlite3 import pandas as pd from datetime import datetime, timezone import importlib +# --- REMOVED: import signal --- +# --- REMOVED: from queue import Empty --- from logging_utils import setup_logging # --- Using the new high-performance WebSocket utility for live prices --- @@ -22,7 +24,7 @@ WATCHED_COINS = ["BTC", "ETH", "SOL", "BNB", "HYPE", "ASTER", "ZEC", "PUMP", "SU LIVE_CANDLE_FETCHER_SCRIPT = "live_candle_fetcher.py" RESAMPLER_SCRIPT = "resampler.py" MARKET_CAP_FETCHER_SCRIPT = "market_cap_fetcher.py" -TRADE_EXECUTOR_SCRIPT = "trade_executor.py" +# --- REMOVED: trade_executor.py is no longer a script --- DASHBOARD_DATA_FETCHER_SCRIPT = "dashboard_data_fetcher.py" STRATEGY_CONFIG_FILE = os.path.join("_data", "strategies.json") DB_PATH = os.path.join("_data", "market_data.db") @@ -46,26 +48,61 @@ def format_market_cap(mc_value): def run_live_candle_fetcher(): """Target function to run the live_candle_fetcher.py script in a resilient loop.""" + + # --- GRACEFUL SHUTDOWN HANDLER --- + import signal + shutdown_requested = False + + def handle_shutdown_signal(signum, frame): + nonlocal shutdown_requested + # Use print here as logging may not be set up + print(f"[CandleFetcher] Shutdown signal ({signum}) received. Will stop after current run.") + shutdown_requested = True + + signal.signal(signal.SIGTERM, handle_shutdown_signal) + signal.signal(signal.SIGINT, handle_shutdown_signal) + # --- END GRACEFUL SHUTDOWN HANDLER --- + log_file = os.path.join(LOGS_DIR, "live_candle_fetcher.log") - while True: + + while not shutdown_requested: # <-- MODIFIED + process = None try: with open(log_file, 'a') as f: - # We can't get coins from strategies.json here, so we pass the default list command = [sys.executable, LIVE_CANDLE_FETCHER_SCRIPT, "--coins"] + WATCHED_COINS + ["--log-level", "off"] f.write(f"\n--- Starting {LIVE_CANDLE_FETCHER_SCRIPT} at {datetime.now()} ---\n") - subprocess.run(command, check=True, stdout=f, stderr=subprocess.STDOUT) + + # Use Popen instead of run to be non-blocking + process = subprocess.Popen(command, stdout=f, stderr=subprocess.STDOUT) + + # Poll the process and check for shutdown request + while process.poll() is None and not shutdown_requested: + time.sleep(0.5) # Poll every 500ms + + if shutdown_requested and process.poll() is None: + print(f"[CandleFetcher] Terminating subprocess {LIVE_CANDLE_FETCHER_SCRIPT}...") + process.terminate() # Terminate the child script + process.wait() # Wait for it to exit + print(f"[CandleFetcher] Subprocess terminated.") + except (subprocess.CalledProcessError, Exception) as e: + if shutdown_requested: + break # Don't restart if we're shutting down with open(log_file, 'a') as f: f.write(f"\n--- PROCESS ERROR at {datetime.now()} ---\n") f.write(f"Live candle fetcher failed: {e}. 
Restarting...\n") time.sleep(5) + + if shutdown_requested: + break # Exit outer loop + + print("[CandleFetcher] Live candle fetcher shutting down.") def run_resampler_job(timeframes_to_generate: list): """Defines the job for the resampler, redirecting output to a log file.""" log_file = os.path.join(LOGS_DIR, "resampler.log") try: - # --- MODIFIED: No longer needs to check for empty list, coins are from WATCHED_COINS --- command = [sys.executable, RESAMPLER_SCRIPT, "--coins"] + WATCHED_COINS + ["--timeframes"] + timeframes_to_generate + ["--log-level", "normal"] with open(log_file, 'a') as f: f.write(f"\n--- Starting resampler.py job at {datetime.now()} ---\n") @@ -78,19 +115,34 @@ def run_resampler_job(timeframes_to_generate: list): def resampler_scheduler(timeframes_to_generate: list): """Schedules the resampler.py script.""" - setup_logging('off', 'ResamplerScheduler') + + # --- GRACEFUL SHUTDOWN HANDLER --- + import signal + shutdown_requested = False - if not timeframes_to_generate: - logging.warning("Resampler scheduler started but no timeframes were provided to generate. The process will idle.") - return # Exit the function if there's nothing to do - + def handle_shutdown_signal(signum, frame): + nonlocal shutdown_requested + try: + logging.info(f"Shutdown signal ({signum}) received. Exiting loop...") + except NameError: + print(f"[ResamplerScheduler] Shutdown signal ({signum}) received. Exiting loop...") + shutdown_requested = True + + signal.signal(signal.SIGTERM, handle_shutdown_signal) + signal.signal(signal.SIGINT, handle_shutdown_signal) + # --- END GRACEFUL SHUTDOWN HANDLER --- + + setup_logging('off', 'ResamplerScheduler') run_resampler_job(timeframes_to_generate) # Schedule to run every minute at the :01 second mark schedule.every().minute.at(":01").do(run_resampler_job, timeframes_to_generate=timeframes_to_generate) - logging.info(f"Resampler scheduled to run every minute at :01 for {timeframes_to_generate}.") - while True: + logging.info("Resampler scheduled to run every minute at :01.") + + while not shutdown_requested: # <-- MODIFIED schedule.run_pending() - time.sleep(1) # Check every second to not miss the scheduled time + time.sleep(0.5) # Check every 500ms to not miss the scheduled time and be responsive + + logging.info("ResamplerScheduler shutting down.") def run_market_cap_fetcher_job(): @@ -109,35 +161,128 @@ def run_market_cap_fetcher_job(): def market_cap_fetcher_scheduler(): """Schedules the market_cap_fetcher.py script to run daily at a specific UTC time.""" + + # --- GRACEFUL SHUTDOWN HANDLER --- + import signal + shutdown_requested = False + + def handle_shutdown_signal(signum, frame): + nonlocal shutdown_requested + try: + logging.info(f"Shutdown signal ({signum}) received. Exiting loop...") + except NameError: + print(f"[MarketCapScheduler] Shutdown signal ({signum}) received. 
Exiting loop...") + shutdown_requested = True + + signal.signal(signal.SIGTERM, handle_shutdown_signal) + signal.signal(signal.SIGINT, handle_shutdown_signal) + # --- END GRACEFUL SHUTDOWN HANDLER --- + setup_logging('off', 'MarketCapScheduler') schedule.every().day.at("00:15", "UTC").do(run_market_cap_fetcher_job) - while True: + + while not shutdown_requested: # <-- MODIFIED schedule.run_pending() - time.sleep(60) + # Sleep for 60 seconds, but check for shutdown flag every second + for _ in range(60): + if shutdown_requested: + break + time.sleep(1) + + logging.info("MarketCapScheduler shutting down.") -def run_trade_executor(trade_signal_queue): +def run_trade_executor(order_execution_queue: multiprocessing.Queue): """ - Target function to run the trade_executor.py script in a resilient loop. - It passes the shared signal queue to the executor. + Target function to run the TradeExecutor class in a resilient loop. + It now consumes from the order_execution_queue. """ - log_file = os.path.join(LOGS_DIR, "trade_executor.log") + + # --- GRACEFUL SHUTDOWN HANDLER --- + import signal + + def handle_shutdown_signal(signum, frame): + # We can just raise KeyboardInterrupt, as it's handled below + logging.info(f"Shutdown signal ({signum}) received. Initiating graceful exit...") + raise KeyboardInterrupt + + signal.signal(signal.SIGTERM, handle_shutdown_signal) + # --- END GRACEFUL SHUTDOWN HANDLER --- + + log_file_path = os.path.join(LOGS_DIR, "trade_executor.log") + try: + sys.stdout = open(log_file_path, 'a', buffering=1) + sys.stderr = sys.stdout + except Exception as e: + print(f"Failed to open log file for TradeExecutor: {e}") + + setup_logging('normal', f"TradeExecutor") + logging.info("\n--- Starting Trade Executor process ---") + while True: try: - with open(log_file, 'a') as f: - f.write(f"\n--- Starting Trade Executor at {datetime.now()} ---\n") - - from trade_executor import TradeExecutor - - executor = TradeExecutor(log_level="normal", trade_signal_queue=trade_signal_queue) - executor.run() # This will block and run forever - - except (subprocess.CalledProcessError, Exception) as e: - with open(log_file, 'a') as f: - f.write(f"\n--- PROCESS ERROR at {datetime.now()} ---\n") - f.write(f"Trade Executor failed: {e}. Restarting...\n") + from trade_executor import TradeExecutor + + executor = TradeExecutor(log_level="normal", order_execution_queue=order_execution_queue) + + # --- REVERTED: Call executor.run() directly --- + executor.run() + + except KeyboardInterrupt: + logging.info("Trade Executor interrupted. Exiting.") + return + except Exception as e: + logging.error(f"Trade Executor failed: {e}. Restarting...\n", exc_info=True) time.sleep(10) +def run_position_manager(trade_signal_queue: multiprocessing.Queue, order_execution_queue: multiprocessing.Queue): + """ + Target function to run the PositionManager class in a resilient loop. + Consumes from trade_signal_queue, produces for order_execution_queue. + """ + + # --- GRACEFUL SHUTDOWN HANDLER --- + import signal + + def handle_shutdown_signal(signum, frame): + # Raise KeyboardInterrupt, as it's handled by the loop + logging.info(f"Shutdown signal ({signum}) received. 
Initiating graceful exit...") + raise KeyboardInterrupt + + signal.signal(signal.SIGTERM, handle_shutdown_signal) + # --- END GRACEFUL SHUTDOWN HANDLER --- + + log_file_path = os.path.join(LOGS_DIR, "position_manager.log") + try: + sys.stdout = open(log_file_path, 'a', buffering=1) + sys.stderr = sys.stdout + except Exception as e: + print(f"Failed to open log file for PositionManager: {e}") + + setup_logging('normal', f"PositionManager") + logging.info("\n--- Starting Position Manager process ---") + + while True: + try: + from position_manager import PositionManager + + manager = PositionManager( + log_level="normal", + trade_signal_queue=trade_signal_queue, + order_execution_queue=order_execution_queue + ) + + # --- REVERTED: Call manager.run() directly --- + manager.run() + + except KeyboardInterrupt: + logging.info("Position Manager interrupted. Exiting.") + return + except Exception as e: + logging.error(f"Position Manager failed: {e}. Restarting...\n", exc_info=True) + time.sleep(10) + + def run_strategy(strategy_name: str, config: dict, trade_signal_queue: multiprocessing.Queue): """ This function BECOMES the strategy runner. It is executed as a separate @@ -149,9 +294,22 @@ def run_strategy(strategy_name: str, config: dict, trade_signal_queue: multiproc import sys import time import logging + import signal # <-- ADDED from logging_utils import setup_logging from strategies.base_strategy import BaseStrategy + # --- GRACEFUL SHUTDOWN HANDLER --- + def handle_shutdown_signal(signum, frame): + # Raise KeyboardInterrupt, as it's handled by the loop + try: + logging.info(f"Shutdown signal ({signum}) received. Initiating graceful exit...") + except NameError: + print(f"[Strategy-{strategy_name}] Shutdown signal ({signum}) received. Initiating graceful exit...") + raise KeyboardInterrupt + + signal.signal(signal.SIGTERM, handle_shutdown_signal) + # --- END GRACEFUL SHUTDOWN HANDLER --- + # --- Setup logging to file for this specific process --- log_file_path = os.path.join(LOGS_DIR, f"strategy_{strategy_name}.log") try: @@ -183,10 +341,12 @@ def run_strategy(strategy_name: str, config: dict, trade_signal_queue: multiproc logging.info(f"Starting POLLING logic loop...") strategy.run_polling_loop() # This is the original blocking call + # --- REVERTED: Added back simple KeyboardInterrupt handler --- except KeyboardInterrupt: - logging.info("Strategy process stopping.") + logging.info(f"Strategy {strategy_name} process stopping.") return except Exception as e: + # --- REVERTED: Removed specific check for KeyboardInterrupt --- logging.error(f"Strategy '{strategy_name}' failed: {e}", exc_info=True) logging.info("Restarting strategy in 10 seconds...") time.sleep(10) @@ -194,12 +354,30 @@ def run_strategy(strategy_name: str, config: dict, trade_signal_queue: multiproc def run_dashboard_data_fetcher(): """Target function to run the dashboard_data_fetcher.py script.""" + + # --- GRACEFUL SHUTDOWN HANDLER --- + import signal + + def handle_shutdown_signal(signum, frame): + # Raise KeyboardInterrupt, as it's handled by the loop + try: + logging.info(f"Shutdown signal ({signum}) received. Initiating graceful exit...") + except NameError: + print(f"[DashboardDataFetcher] Shutdown signal ({signum}) received. 
Initiating graceful exit...")
+        raise KeyboardInterrupt
+    
+    signal.signal(signal.SIGTERM, handle_shutdown_signal)
+    # --- END GRACEFUL SHUTDOWN HANDLER ---
+    
     log_file = os.path.join(LOGS_DIR, "dashboard_data_fetcher.log")
     while True:
         try:
             with open(log_file, 'a') as f:
                 f.write(f"\n--- Starting Dashboard Data Fetcher at {datetime.now()} ---\n")
                 subprocess.run([sys.executable, DASHBOARD_DATA_FETCHER_SCRIPT, "--log-level", "normal"], check=True, stdout=f, stderr=subprocess.STDOUT)
+        except KeyboardInterrupt: # --- MODIFIED: Added to catch interrupt ---
+            logging.info("Dashboard Data Fetcher stopping.")
+            break
         except (subprocess.CalledProcessError, Exception) as e:
             with open(log_file, 'a') as f:
                 f.write(f"\n--- PROCESS ERROR at {datetime.now()} ---\n")
@@ -320,39 +498,27 @@ class MainApp:
 
                 config_params = self.strategy_configs.get(name, {}).get('parameters', {})
                 
-                # --- NEW ROBUST LOGIC ---
-                # 1. Get Timeframe (always from config)
-                timeframe = config_params.get('timeframe', 'N/A')
-                
-                # 2. Get Coin: Try status file first (live), then config file (static)
+                # --- FIX: Read coin/size from status file first, fallback to config ---
                 coin = status.get('coin', config_params.get('coin', 'N/A'))
 
-                # 3. Get Size: Try status file first, then config file
-                size_from_status = status.get('size', None)
-                size_from_config = config_params.get('size', None)
+                # --- FIX: Handle nested 'coins_to_copy' logic for size ---
+                if 'coins_to_copy' in config_params:
+                    size = status.get('size', 'Multi')
+                else:
+                    size = config_params.get('size', 'N/A')
 
-                size = "N/A"
-                if size_from_status is not None:
-                    size = size_from_status # Use live status from copy_trader
-                elif size_from_config is not None:
-                    size = size_from_config # Use config from simple strategy
-                elif 'coins_to_copy' in config_params:
-                    # Special case: copy_trader, but status file is old (no 'size' field)
-                    if coin != 'N/A' and coin != 'Multi':
-                        # Try to find size in config if we know the coin from status
-                        # --- SYNTAX FIX: Removed extra ".get(" ---
-                        size = config_params.get('coins_to_copy', {}).get(coin, {}).get('size', 'Multi')
-                    else:
-                        coin = 'Multi' # It's a copy trader, but we don't know the coin
-                        size = 'Multi'
-                
-                size_display = f"{size:>8}" if isinstance(size, (int, float)) else f"{str(size):>8}"
-                # --- END OF NEW LOGIC ---
+                timeframe = config_params.get('timeframe', 'N/A')
 
                 # --- FIX: Removed parameter string logic ---
                 
                 # --- FIX: Removed 'params_str' from the formatted line ---
-                right_table_lines.append(f"{i:^2} | {name:<25} | {coin:^6} | {signal:^8} | {price_display:>12} | {last_change_display:>17} | {timeframe:^5} | {size_display} |")
+                
+                size_display = f"{size:>8}"
+                if isinstance(size, (int, float)):
+                    size_display = f"{size:>8.4f}" # Format size to 4 decimal places
+                # --- END NEW LOGIC ---
+                
+                # Use the size_display computed above so the 4-decimal formatting is applied
+                right_table_lines.append(f"{i:^2} | {name:<25} | {coin:^6} | {signal:^8} | {price_display:>12} | {last_change_display:>17} | {timeframe:^5} | {size_display} |")
 
             right_table_lines.append("-" * right_table_width)
             
         output_lines = []
@@ -370,35 +536,7 @@ class MainApp:
             output_lines.append(f"{'Account':<10} | {'Coin':<6} | {'Size':>15} | {'Entry Price':>12} | {'Mark Price':>12} | {'PNL':>15} | {'Leverage':>10} |")
             output_lines.append("-" * pos_table_width)
 
-            perps_positions = self.open_positions.get('perpetuals_account', {}).get('open_positions', [])
-            spot_positions = self.open_positions.get('spot_account', {}).get('positions', [])
-
-            if not perps_positions and not spot_positions:
-                output_lines.append("No open positions found.")
-            else:
-                for pos in
perps_positions: - try: - pnl = float(pos.get('pnl', 0.0)) - pnl_str = f"${pnl:,.2f}" - except (ValueError, TypeError): - pnl_str = "Error" - - coin = pos.get('coin') or '-' - size = pos.get('size') or '-' - entry_price = pos.get('entry_price') or '-' - mark_price = pos.get('mark_price') or '-' - leverage = pos.get('leverage') or '-' - - output_lines.append(f"{'Perps':<10} | {coin:<6} | {size:>15} | {entry_price:>12} | {mark_price:>12} | {pnl_str:>15} | {leverage:>10} |") - - for pos in spot_positions: - pnl = pos.get('pnl', 'N/A') - coin = pos.get('coin') or '-' - balance_size = pos.get('balance_size') or '-' - output_lines.append(f"{'Spot':<10} | {coin:<6} | {balance_size:>15} | {'-':>12} | {'-':>12} | {pnl:>15} | {'-':>10} |") - output_lines.append("-" * pos_table_width) - - # --- REMOVED: Background Processes Section --- + # --- REMOVED: Background Processes section --- final_output = "\n".join(output_lines) print(final_output) @@ -422,7 +560,7 @@ if __name__ == "__main__": os.makedirs(LOGS_DIR) processes = {} - strategy_configs = {} + # --- REVERTED: Removed process groups --- try: with open(STRATEGY_CONFIG_FILE, 'r') as f: @@ -431,27 +569,37 @@ if __name__ == "__main__": logging.error(f"Could not load strategies from '{STRATEGY_CONFIG_FILE}': {e}") sys.exit(1) - # --- MODIFIED: Removed dynamic timeframe logic --- - # --- NEW: Hardcoded timeframes for the resampler --- - resampler_timeframes = [ + # --- FIX: Hardcoded timeframes --- + required_timeframes = [ "3m", "5m", "15m", "30m", "1h", "2h", "4h", "8h", "12h", "1d", "3d", "1w", "1M", "148m", "37m" ] - logging.info(f"Using hardcoded timeframes for resampler: {resampler_timeframes}") - # --- END NEW --- - + logging.info(f"Using fixed timeframes for resampler: {required_timeframes}") + with multiprocessing.Manager() as manager: shared_prices = manager.dict() + # --- FIX: Create TWO queues --- trade_signal_queue = manager.Queue() + order_execution_queue = manager.Queue() + # --- REVERTED: All processes are daemon=True and in one dict --- + processes["Live Market Feed"] = multiprocessing.Process(target=start_live_feed, args=(shared_prices, 'off'), daemon=True) processes["Live Candle Fetcher"] = multiprocessing.Process(target=run_live_candle_fetcher, daemon=True) - # --- MODIFIED: Pass the new hardcoded list to the resampler process --- - processes["Resampler"] = multiprocessing.Process(target=resampler_scheduler, args=(resampler_timeframes,), daemon=True) + processes["Resampler"] = multiprocessing.Process(target=resampler_scheduler, args=(list(required_timeframes),), daemon=True) processes["Market Cap Fetcher"] = multiprocessing.Process(target=market_cap_fetcher_scheduler, daemon=True) - - processes["Trade Executor"] = multiprocessing.Process(target=run_trade_executor, args=(trade_signal_queue,), daemon=True) processes["Dashboard Data"] = multiprocessing.Process(target=run_dashboard_data_fetcher, daemon=True) + + processes["Position Manager"] = multiprocessing.Process( + target=run_position_manager, + args=(trade_signal_queue, order_execution_queue), + daemon=True + ) + processes["Trade Executor"] = multiprocessing.Process( + target=run_trade_executor, + args=(order_execution_queue,), + daemon=True + ) for name, config in strategy_configs.items(): if config.get("enabled", False): @@ -459,7 +607,9 @@ if __name__ == "__main__": logging.error(f"Strategy '{name}' is missing 'class' key. 
Skipping.") continue proc = multiprocessing.Process(target=run_strategy, args=(name, config, trade_signal_queue), daemon=True) - processes[f"Strategy: {name}"] = proc + processes[f"Strategy: {name}"] = proc # Add to strategy group + + # --- REVERTED: Removed combined dict --- for name, proc in processes.items(): logging.info(f"Starting process '{name}'...") @@ -471,11 +621,47 @@ if __name__ == "__main__": try: app.run() except KeyboardInterrupt: + # --- MODIFIED: Staged shutdown --- logging.info("Shutting down...") - for proc in processes.values(): - if proc.is_alive(): proc.terminate() - for proc in processes.values(): - if proc.is_alive(): proc.join() + + strategy_procs = {} + other_procs = {} + for name, proc in processes.items(): + if name.startswith("Strategy:"): + strategy_procs[name] = proc + else: + other_procs[name] = proc + + # --- 1. Terminate strategy processes --- + logging.info("Shutting down strategy processes first...") + for name, proc in strategy_procs.items(): + if proc.is_alive(): + logging.info(f"Terminating process: '{name}'...") + proc.terminate() + + # --- 2. Wait for 5 seconds --- + logging.info("Waiting 5 seconds for strategies to close...") + time.sleep(5) + + # --- 3. Terminate all other processes --- + logging.info("Shutting down remaining core processes...") + for name, proc in other_procs.items(): + if proc.is_alive(): + logging.info(f"Terminating process: '{name}'...") + proc.terminate() + + # --- 4. Join all processes (strategies and others) --- + logging.info("Waiting for all processes to join...") + for name, proc in processes.items(): # Iterate over the original dict to get all + if proc.is_alive(): + logging.info(f"Waiting for process '{name}' to join...") + proc.join(timeout=5) # Wait up to 5 seconds + if proc.is_alive(): + # If it's still alive, force kill + logging.warning(f"Process '{name}' did not terminate, forcing kill.") + proc.kill() + # --- END MODIFIED --- + logging.info("Shutdown complete.") sys.exit(0) diff --git a/position_manager.py b/position_manager.py new file mode 100644 index 0000000..8533564 --- /dev/null +++ b/position_manager.py @@ -0,0 +1,168 @@ +import logging +import os +import sys +import json +import time +import multiprocessing +import numpy as np # Import numpy to handle np.float64 +from datetime import datetime, timezone + +from logging_utils import setup_logging +from trade_log import log_trade + +class PositionManager: + """ + Listens for strategy signals, READS the current position state, + and sends explicit execution orders to the TradeExecutor. + It does NOT write to the position state file. + """ + + def __init__(self, log_level: str, trade_signal_queue: multiprocessing.Queue, order_execution_queue: multiprocessing.Queue): + # Note: Logging is set up by the run_position_manager function + + self.trade_signal_queue = trade_signal_queue + self.order_execution_queue = order_execution_queue + + self.opened_positions_file = os.path.join("_data", "opened_positions.json") + + # --- MODIFIED: Load state, but will not save it --- + self.opened_positions = self._load_opened_positions() + if self.opened_positions: + logging.info(f"Position Manager started. Loaded {len(self.opened_positions)} open positions (read-only).") + else: + logging.info("Position Manager started. 
No initial positions found.") + + + def _load_opened_positions(self) -> dict: + """Loads the state of currently managed positions from a JSON file.""" + if not os.path.exists(self.opened_positions_file): + return {} + try: + with open(self.opened_positions_file, 'r', encoding='utf-8') as f: + return json.load(f) + except (json.JSONDecodeError, IOError) as e: + logging.error(f"Failed to read '{self.opened_positions_file}': {e}. Starting with empty state.", exc_info=True) + return {} + + # --- REMOVED: _save_opened_positions method --- + # (The TradeExecutor is now responsible for saving) + + def send_order(self, order_data: dict): + """Helper function to put a standardized order onto the execution queue.""" + logging.info(f"Sending order to executor: {order_data}") + self.order_execution_queue.put(order_data) + + def run(self): + """ + Main execution loop. Blocks and waits for a signal from the queue. + Converts strategy signals into execution orders based on current state. + """ + logging.info("Position Manager started. Waiting for signals...") + while True: + try: + trade_signal = self.trade_signal_queue.get() + if not trade_signal: + continue + + logging.info(f"Received signal: {trade_signal}") + + # --- NEW: Reload the position state on every signal --- + # This ensures we have the most up-to-date state from the Executor + self.opened_positions = self._load_opened_positions() + + name = trade_signal['strategy_name'] + config = trade_signal['config'] + params = config['parameters'] + coin = trade_signal['coin'].upper() + desired_signal = trade_signal['signal'] + + signal_price = trade_signal.get('signal_price') + if isinstance(signal_price, np.float64): + signal_price = float(signal_price) + + if not signal_price or signal_price <= 0: + logging.warning(f"[{name}] Signal received with invalid price ({signal_price}). Skipping.") + continue + + # --- Handle copy_trader's nested config --- + if 'coins_to_copy' in params: + # ... (omitted for brevity, this logic is correct and unchanged) ... + matching_coin_key = next((k for k in params['coins_to_copy'] if k.upper() == coin), None) + if matching_coin_key: + coin_config = params['coins_to_copy'][matching_coin_key] + params['size'] = coin_config.get('size') + params['leverage_long'] = coin_config.get('leverage_long', 2) + params['leverage_short'] = coin_config.get('leverage_short', 2) + + size = params.get('size') + if not size: + logging.error(f"[{name}] Signal received with no 'size'. Skipping trade.") + continue + + leverage_long = int(params.get('leverage_long', 2)) + leverage_short = int(params.get('leverage_short', 2)) + agent_name = (config.get("agent") or "default").lower() + + # --- NEW: Stateful decision making --- + position_key = f"{name}_{coin}" + current_position = self.opened_positions.get(position_key) + + logging.info(f"[{name}] Processing signal '{desired_signal}'. 
Current state: {current_position['side'] if current_position else 'FLAT'}") + + order_data = { + "agent": agent_name, + "coin": coin, + "limit_px": signal_price, + # --- NEW: Pass all context to the executor --- + "strategy": name, + "position_key": position_key, + "open_price": signal_price, + "open_time_utc": datetime.now(timezone.utc).isoformat(), + "amount": size + } + + if desired_signal == "OPEN_LONG": + if current_position: + logging.info(f"[{name}] Ignoring OPEN_LONG signal, already in a position.") + continue + + logging.warning(f"[{name}] ACTION: Setting leverage to {leverage_long}x and opening LONG.") + self.send_order({**order_data, "action": "update_leverage", "is_buy": True, "size": leverage_long}) + self.send_order({**order_data, "action": "market_open", "is_buy": True, "size": size}) + log_trade(strategy=name, coin=coin, action="OPEN_LONG", price=signal_price, size=size, signal=desired_signal) + + elif desired_signal == "OPEN_SHORT": + if current_position: + logging.info(f"[{name}] Ignoring OPEN_SHORT signal, already in a position.") + continue + + logging.warning(f"[{name}] ACTION: Setting leverage to {leverage_short}x and opening SHORT.") + self.send_order({**order_data, "action": "update_leverage", "is_buy": False, "size": leverage_short}) + self.send_order({**order_data, "action": "market_open", "is_buy": False, "size": size}) + log_trade(strategy=name, coin=coin, action="OPEN_SHORT", price=signal_price, size=size, signal=desired_signal) + + elif desired_signal == "CLOSE_LONG": + if not current_position or current_position['side'] != 'long': + logging.info(f"[{name}] Ignoring CLOSE_LONG signal, not in a long position.") + continue + + logging.warning(f"[{name}] ACTION: Closing LONG position.") + self.send_order({**order_data, "action": "market_close", "is_buy": False, "size": size}) + log_trade(strategy=name, coin=coin, action="CLOSE_LONG", price=signal_price, size=size, signal=desired_signal) + + elif desired_signal == "CLOSE_SHORT": + if not current_position or current_position['side'] != 'short': + logging.info(f"[{name}] Ignoring CLOSE_SHORT signal, not in a short position.") + continue + + logging.warning(f"[{name}] ACTION: Closing SHORT position.") + self.send_order({**order_data, "action": "market_close", "is_buy": True, "size": size}) + log_trade(strategy=name, coin=coin, action="CLOSE_SHORT", price=signal_price, size=size, signal=desired_signal) + + else: + logging.warning(f"[{name}] Received unhandled signal '{desired_signal}'. No action taken.") + + except Exception as e: + logging.error(f"An error occurred in the position manager loop: {e}", exc_info=True) + time.sleep(1) + diff --git a/strategies/copy_trader_strategy.py b/strategies/copy_trader_strategy.py index 9f086ea..e2aaac4 100644 --- a/strategies/copy_trader_strategy.py +++ b/strategies/copy_trader_strategy.py @@ -1,6 +1,7 @@ import logging import time import json +import os from datetime import datetime, timezone from hyperliquid.info import Info from hyperliquid.utils import constants @@ -12,13 +13,16 @@ class CopyTraderStrategy(BaseStrategy): An event-driven strategy that monitors a target wallet address and copies its trades for a specific set of allowed coins, using per-coin size and leverage settings. + + This strategy is STATEFUL and tracks its own positions. 
""" def __init__(self, strategy_name: str, params: dict, trade_signal_queue, shared_status: dict = None): super().__init__(strategy_name, params, trade_signal_queue, shared_status) self.target_address = self.params.get("target_address", "").lower() - self.coins_to_copy = self.params.get("coins_to_copy", {}) + # Convert all coin keys to uppercase for consistency + self.coins_to_copy = {k.upper(): v for k, v in self.coins_to_copy.items()} self.allowed_coins = list(self.coins_to_copy.keys()) if not self.target_address: @@ -29,17 +33,63 @@ class CopyTraderStrategy(BaseStrategy): self.info = None # Will be initialized in the run loop - # --- FIX: Set initial state to "WAIT" --- - self.current_signal = "WAIT" + # --- MODIFIED: Load and manage its own position state --- + self.position_state_file = os.path.join("_data", f"strategy_state_{self.strategy_name}.json") + self.current_positions = self._load_position_state() + + # --- MODIFIED: Check if shared_status is None before using it --- + if self.shared_status is None: + logging.warning("No shared_status dictionary provided. Initializing a new one.") + self.shared_status = {} + + self.current_signal = self.shared_status.get("current_signal", "WAIT") + self.signal_price = self.shared_status.get("signal_price") + self.last_signal_change_utc = self.shared_status.get("last_signal_change_utc") - # Record the strategy's start time to ignore historical data self.start_time_utc = datetime.now(timezone.utc) logging.info(f"Strategy initialized. Ignoring all trades before {self.start_time_utc.isoformat()}") + logging.info(f"Loaded positions: {self.current_positions}") + + def _load_position_state(self) -> dict: + """Loads the strategy's current open positions from a file.""" + if os.path.exists(self.position_state_file): + try: + with open(self.position_state_file, 'r') as f: + logging.info(f"Loading existing position state from {self.position_state_file}") + return json.load(f) + except (IOError, json.JSONDecodeError): + logging.warning(f"Could not read position state file {self.position_state_file}. Starting fresh.") + return {} # { "ETH": {"side": "long", "size": 0.01, "entry": 3000}, ... } + + def _save_position_state(self): + """Saves the strategy's current open positions to a file.""" + try: + with open(self.position_state_file, 'w') as f: + json.dump(self.current_positions, f, indent=4) + except IOError as e: + logging.error(f"Failed to save position state: {e}") def calculate_signals(self, df): # This strategy is event-driven, so it does not use polling-based signal calculation. 
pass + def send_explicit_signal(self, signal: str, coin: str, price: float, trade_params: dict, size: float): + """Helper to send a formatted signal to the PositionManager.""" + config = { + "agent": self.params.get("agent"), + "parameters": trade_params + } + + self.trade_signal_queue.put({ + "strategy_name": self.strategy_name, + "signal": signal, # e.g., "OPEN_LONG", "CLOSE_SHORT" + "coin": coin, + "signal_price": price, + "config": config, + "size": size # Explicitly pass size + }) + logging.info(f"Explicit signal SENT: {signal} {coin} @ {price}, Size: {size}") + def on_fill_message(self, message): """ This is the callback function that gets triggered by the WebSocket @@ -66,57 +116,82 @@ class CopyTraderStrategy(BaseStrategy): logging.debug(f"Received {len(fills)} fill(s) for user {user_address}") for fill in fills: - # Check if the trade is new or historical - trade_time = datetime.fromtimestamp(fill['time'] / 1000, tz=timezone.utc) - if trade_time < self.start_time_utc: - logging.info(f"Ignoring stale/historical trade from {trade_time.isoformat()}") + # Check if the trade is new or historical + trade_time = datetime.fromtimestamp(fill['time'] / 1000, tz=timezone.utc) + if trade_time < self.start_time_utc: + logging.info(f"Ignoring stale/historical trade from {trade_time.isoformat()}") + continue + + coin = fill.get('coin').upper() + + if coin in self.allowed_coins: + side = fill.get('side') + price = float(fill.get('px')) + fill_size = float(fill.get('sz')) + + # Get our strategy's configured trade size for this coin + coin_config = self.coins_to_copy.get(coin) + if not coin_config or not coin_config.get("size"): + logging.warning(f"No trade size specified for {coin}. Ignoring fill.") continue - coin = fill.get('coin') + strategy_trade_size = coin_config.get("size") - if coin in self.allowed_coins: - side = fill.get('side') - price = float(fill.get('px')) - - signal = "HOLD" - if side == "B": - signal = "BUY" - elif side == "A": - signal = "SELL" + # Prepare config for the signal + trade_params = self.params.copy() + trade_params.update(coin_config) + + # Get our current position state for this coin + current_local_pos = self.current_positions.get(coin) + current_local_side = current_local_pos.get("side") if current_local_pos else None - coin_config = self.coins_to_copy.get(coin) - if not coin_config or not coin_config.get("size"): - logging.warning(f"No trade size specified for {coin}. Ignoring fill.") - continue - - # --- 1. Create the trade-specific config --- - trade_params = self.params.copy() - trade_params.update(coin_config) - trade_config = { - "agent": self.params.get("agent"), - "parameters": trade_params - } - - # --- 2. (PRIORITY) Put the signal on the queue for the executor --- - self.trade_signal_queue.put({ - "strategy_name": self.strategy_name, - "signal": signal, - "coin": coin, - "signal_price": price, - "config": trade_config - }) - - # --- 3. (Secondary) Update internal state and log --- - self.current_signal = signal + signal_sent = False + if side == "B": # Target bought + if current_local_side == "short": + # Flip: Close short, then open long + logging.warning(f"[{coin}] Target BOUGHT, we are SHORT. 
Flipping to LONG.") + self.send_explicit_signal("CLOSE_SHORT", coin, price, trade_params, current_local_pos.get("size")) + self.send_explicit_signal("OPEN_LONG", coin, price, trade_params, strategy_trade_size) + self.current_positions[coin] = {"side": "long", "size": strategy_trade_size, "entry": price} + signal_sent = True + elif current_local_side is None: + # New: Open long + logging.warning(f"[{coin}] Target BOUGHT, we are FLAT. Opening LONG.") + self.send_explicit_signal("OPEN_LONG", coin, price, trade_params, strategy_trade_size) + self.current_positions[coin] = {"side": "long", "size": strategy_trade_size, "entry": price} + signal_sent = True + else: # We are already long + logging.info(f"[{coin}] Target BOUGHT, we are already LONG. Ignoring.") + + elif side == "A": # Target sold + if current_local_side == "long": + # Flip: Close long, then open short + logging.warning(f"[{coin}] Target SOLD, we are LONG. Flipping to SHORT.") + self.send_explicit_signal("CLOSE_LONG", coin, price, trade_params, current_local_pos.get("size")) + self.send_explicit_signal("OPEN_SHORT", coin, price, trade_params, strategy_trade_size) + self.current_positions[coin] = {"side": "short", "size": strategy_trade_size, "entry": price} + signal_sent = True + elif current_local_side is None: + # New: Open short + logging.warning(f"[{coin}] Target SOLD, we are FLAT. Opening SHORT.") + self.send_explicit_signal("OPEN_SHORT", coin, price, trade_params, strategy_trade_size) + self.current_positions[coin] = {"side": "short", "size": strategy_trade_size, "entry": price} + signal_sent = True + else: # We are already short + logging.info(f"[{coin}] Target SOLD, we are already SHORT. Ignoring.") + + if signal_sent: + # Update dashboard status + self.current_signal = f"{side} @ {coin}" self.signal_price = price self.last_signal_change_utc = trade_time.isoformat() - self._save_status() # Update the dashboard status file - - logging.warning(f"Copy trade signal SENT for {coin}: {signal} @ {price}, Size: {coin_config['size']}") - logging.info(f"Source trade logged: {json.dumps(fill)}") + # --- MODIFIED: Save BOTH status files --- + self._save_status() # For dashboard + self._save_position_state() # For our internal tracking - else: - logging.info(f"Ignoring fill for unmonitored coin: {coin}") + logging.info(f"Source trade logged: {json.dumps(fill)}") + else: + logging.info(f"Ignoring fill for unmonitored coin: {coin}") except Exception as e: logging.error(f"Error in on_fill_message: {e}", exc_info=True) @@ -142,37 +217,81 @@ class CopyTraderStrategy(BaseStrategy): This method overrides the default polling loop. It establishes a persistent WebSocket connection and runs a watchdog to ensure it stays connected. + + It also catches KeyboardInterrupt to gracefully shut down positions. """ - if not self._connect_and_subscribe(): - # If connection fails on start, wait 60s before letting the process restart - time.sleep(60) - return + try: + if not self._connect_and_subscribe(): + # If connection fails on start, wait 60s before letting the process restart + time.sleep(60) + return - # --- ADDED: Save the initial "WAIT" status --- - self._save_status() + # Save the initial "WAIT" status + self._save_status() - while True: - try: - time.sleep(15) # Check the connection every 15 seconds - - if self.info is None or not self.info.ws_manager.is_alive(): - logging.error(f"WebSocket connection lost. 
Attempting to reconnect...") + while True: + try: + time.sleep(15) # Check the connection every 15 seconds - if self.info and self.info.ws_manager: - try: - self.info.ws_manager.stop() - except Exception as e: - logging.error(f"Error stopping old ws_manager: {e}") - - if not self._connect_and_subscribe(): - logging.error("Reconnect failed, will retry in 15s.") + if self.info is None or not self.info.ws_manager.is_alive(): + logging.error(f"WebSocket connection lost. Attempting to reconnect...") + + if self.info and self.info.ws_manager: + try: + self.info.ws_manager.stop() + except Exception as e: + logging.error(f"Error stopping old ws_manager: {e}") + + if not self._connect_and_subscribe(): + logging.error("Reconnect failed, will retry in 15s.") + else: + logging.info("Successfully reconnected to WebSocket.") + self._save_status() else: - logging.info("Successfully reconnected to WebSocket.") - # After reconnecting, save the current status again - self._save_status() - else: - logging.debug("Watchdog check: WebSocket connection is active.") - - except Exception as e: - logging.error(f"An error occurred in the watchdog loop: {e}", exc_info=True) + logging.debug("Watchdog check: WebSocket connection is active.") + + except Exception as e: + logging.error(f"An error occurred in the watchdog loop: {e}", exc_info=True) + + except KeyboardInterrupt: # --- THIS IS THE GRACEFUL SHUTDOWN LOGIC --- + logging.warning(f"Shutdown signal received. Closing all open positions for '{self.strategy_name}'...") + + # Use a copy of the items to avoid runtime modification errors + for coin, position in list(self.current_positions.items()): + current_side = position.get("side") + trade_size = position.get("size") + + if not current_side or not trade_size: + continue + + # Find the config for this coin + coin_config = self.coins_to_copy.get(coin.upper(), {}) + trade_params = self.params.copy() + trade_params.update(coin_config) + + # Use the last entry price as a placeholder for the market close order + price = position.get("entry", 1) # Use 1 as a failsafe + + if current_side == "long": + logging.warning(f"Sending CLOSE_LONG for {coin}, {price}, {trade_size}...") + #self.send_explicit_signal("CLOSE_LONG", coin, price, trade_params, trade_size) + #del self.current_positions[coin] # Assume it will close + elif current_side == "short": + logging.warning(f"Sending CLOSE_SHORT for {coin}, {price}, {trade_size} ...") + #self.send_explicit_signal("CLOSE_SHORT", coin, price, trade_params, trade_size) + #del self.current_positions[coin] # Assume it will close + + self._save_position_state() # Save the new empty state + logging.info("All closing signals sent. 
Exiting strategy.") + + except Exception as e: + logging.error(f"An unhandled error occurred in run_event_loop: {e}", exc_info=True) + + finally: + if self.info and self.info.ws_manager and self.info.ws_manager.is_alive(): + try: + self.info.ws_manager.stop() + logging.info("WebSocket connection stopped.") + except Exception as e: + logging.error(f"Error stopping ws_manager on exit: {e}") diff --git a/trade_executor.py b/trade_executor.py index 4a1f40c..2de81c7 100644 --- a/trade_executor.py +++ b/trade_executor.py @@ -4,6 +4,7 @@ import os import sys import json import time +# --- REVERTED: Removed math import --- from datetime import datetime import multiprocessing @@ -14,48 +15,44 @@ from hyperliquid.utils import constants from dotenv import load_dotenv from logging_utils import setup_logging -from trade_log import log_trade -# Load environment variables from a .env file load_dotenv() class TradeExecutor: """ - Monitors a shared queue for strategy signals and executes trades. - This script is now a dedicated, event-driven consumer. + Executes orders from a queue and, upon API success, + updates the shared 'opened_positions.json' state file. + It is the single source of truth for position state. """ - def __init__(self, log_level: str, trade_signal_queue: multiprocessing.Queue, shared_executor_status: dict = None): - setup_logging(log_level, 'TradeExecutor') - - self.trade_signal_queue = trade_signal_queue - - # Optional Manager.dict() to store live managed positions and other executor status - self.shared_executor_status = shared_executor_status - + def __init__(self, log_level: str, order_execution_queue: multiprocessing.Queue): + # Note: Logging is set up by the run_trade_executor function + + self.order_execution_queue = order_execution_queue + self.vault_address = os.environ.get("MAIN_WALLET_ADDRESS") if not self.vault_address: logging.error("MAIN_WALLET_ADDRESS not set.") - # --- FIX: Raise an exception instead of sys.exit() --- - # This allows the main_app process manager to catch and log the error. - raise ValueError("MAIN_WALLET_ADDRESS not set in environment.") + sys.exit(1) - # --- FIX: Corrected constant name from MAIN_NET_API_URL to MAINNET_API_URL --- self.info = Info(constants.MAINNET_API_URL, skip_ws=True) self.exchanges = self._load_agents() if not self.exchanges: logging.error("No trading agents found in .env file.") - # --- FIX: Raise an exception instead of sys.exit() --- - raise ValueError("No trading agents found in .env file. Check AGENT_PRIVATE_KEY or _AGENT_PK vars.") + sys.exit(1) + + # --- REVERTED: Removed asset_meta loading --- + # self.asset_meta = self._load_asset_metadata() + + # --- NEW: State management logic --- + self.opened_positions_file = os.path.join("_data", "opened_positions.json") + self.opened_positions = self._load_opened_positions() + + logging.info(f"Trade Executor started. Loaded {len(self.opened_positions)} positions.") - self.managed_positions_path = os.path.join("_data", "executor_managed_positions.json") - self.managed_positions = self._load_managed_positions() - logging.info(f"TradeExecutor initialized. Agents available: {list(self.exchanges.keys())}") def _load_agents(self) -> dict: - """ - Discovers and initializes agents by scanning for environment variables. - """ + # ... (omitted for brevity, this logic is correct and unchanged) ... 
exchanges = {} logging.info("Discovering agents from environment variables...") for env_var, private_key in os.environ.items(): @@ -73,179 +70,122 @@ class TradeExecutor: except Exception as e: logging.error(f"Failed to initialize agent '{agent_name}': {e}") return exchanges + + # --- REVERTED: Removed asset metadata loading --- + # def _load_asset_metadata(self) -> dict: ... - def _load_managed_positions(self) -> dict: - """Loads the state of which strategy manages which position.""" - # Prefer shared in-memory state when available + # --- NEW: Position state save/load methods --- + def _load_opened_positions(self) -> dict: + """Loads the state of currently managed positions from a JSON file.""" + if not os.path.exists(self.opened_positions_file): + return {} try: - if self.shared_executor_status is not None: - mgr = self.shared_executor_status.get('managed_positions') if isinstance(self.shared_executor_status, dict) else None - if mgr: - logging.info("Loading managed positions from shared executor status.") - return dict(mgr) - except Exception: - logging.debug("Unable to read managed positions from shared status. Falling back to file.") + with open(self.opened_positions_file, 'r', encoding='utf-8') as f: + return json.load(f) + except (json.JSONDecodeError, IOError) as e: + logging.error(f"Failed to read '{self.opened_positions_file}': {e}. Starting with empty state.", exc_info=True) + return {} - if os.path.exists(self.managed_positions_path): - try: - with open(self.managed_positions_path, 'r') as f: - logging.info("Loading existing managed positions state from file.") - return json.load(f) - except (IOError, json.JSONDecodeError): - logging.warning("Could not read managed positions file. Starting fresh.") - return {} - - def _save_managed_positions(self): - """Saves the current state of managed positions.""" + def _save_opened_positions(self): + """Saves the current state of managed positions to a JSON file.""" try: - if self.shared_executor_status is not None: - try: - # store under a known key - self.shared_executor_status['managed_positions'] = dict(self.managed_positions) - except Exception: - # fallback: try direct assignment - self.shared_executor_status['managed_positions'] = self.managed_positions - else: - with open(self.managed_positions_path, 'w') as f: - json.dump(self.managed_positions, f, indent=4) + with open(self.opened_positions_file, 'w', encoding='utf-8') as f: + json.dump(self.opened_positions, f, indent=4) + logging.debug(f"Successfully saved {len(self.opened_positions)} positions to '{self.opened_positions_file}'") except IOError as e: - logging.error(f"Failed to save managed positions state: {e}") + logging.error(f"Failed to write to '{self.opened_positions_file}': {e}", exc_info=True) + + # --- REVERTED: Removed tick rounding function --- + # def _round_to_tick(self, price, tick_size): ... def run(self): """ - Main execution loop. Blocks and waits for a signal from the queue. + Main execution loop. Waits for an order and updates state on success. """ - logging.info("Trade Executor started. Waiting for signals...") + logging.info("Trade Executor started. 
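Since _save_opened_positions rewrites the state file after every fill, a crash mid-write could leave truncated JSON behind. One way to harden this, sketched here as an optional variant (assuming the _data directory already exists), is a write-then-atomic-replace:

import json
import os
import tempfile

def save_state_atomically(state: dict, path: str) -> None:
    # Write to a temp file in the same directory, then swap it into place;
    # os.replace is atomic on both POSIX and Windows.
    fd, tmp_path = tempfile.mkstemp(dir=os.path.dirname(path), suffix=".tmp")
    try:
        with os.fdopen(fd, "w", encoding="utf-8") as f:
            json.dump(state, f, indent=4)
        os.replace(tmp_path, path)
    except Exception:
        if os.path.exists(tmp_path):
            os.remove(tmp_path)
        raise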
Waiting for orders...") while True: try: - trade_signal = self.trade_signal_queue.get() - if not trade_signal: + order = self.order_execution_queue.get() + if not order: continue - logging.info(f"Received signal: {trade_signal}") + logging.info(f"Received order: {order}") - # Basic validation and debug information to help trace gaps - if 'config' not in trade_signal: - logging.error(f"Signal missing 'config' key. Ignoring: {trade_signal}") - continue - if 'strategy_name' not in trade_signal: - logging.error(f"Signal missing 'strategy_name' key. Ignoring: {trade_signal}") - continue - # Special command handling - if isinstance(trade_signal, dict) and trade_signal.get('_cmd') == 'CLOSE_ALL': - target_agent = trade_signal.get('agent') - logging.warning(f"Received CLOSE_ALL command for agent: {target_agent}") - if not target_agent: - logging.error("CLOSE_ALL command missing 'agent' field. Ignoring.") - continue - - # Iterate managed positions and close those opened by the target agent - to_close = [s for s, v in self.managed_positions.items() if v.get('agent') == target_agent] - if not to_close: - logging.info(f"No managed positions found for agent '{target_agent}'.") - continue - - for sname in to_close: - pos = self.managed_positions.get(sname) - if not pos: - continue - coin = pos.get('coin') - side = pos.get('side') - size = pos.get('size') - # Determine is_buy to neutralize the position - is_buy = True if side == 'short' else False - logging.warning(f"[CLOSE_ALL] Closing {side} position for strategy {sname}, coin {coin}, size {size}") - try: - # Use the agent's exchange if available - exch = self.exchanges.get(target_agent) - if exch: - exch.market_open(coin, is_buy, size, None, 0.01) - else: - logging.error(f"Exchange object for agent '{target_agent}' not found. Skipping live close for {sname}.") - except Exception as e: - logging.error(f"Error closing position for {sname}: {e}") - # remove from managed positions regardless to avoid stuck state - try: - del self.managed_positions[sname] - except KeyError: - pass - - self._save_managed_positions() - logging.info(f"CLOSE_ALL for agent '{target_agent}' completed.") - continue - - name = trade_signal['strategy_name'] - config = trade_signal['config'] - params = config.get('parameters', {}) - coin = trade_signal['coin'] - desired_signal = trade_signal['signal'] - status = trade_signal - - size = params.get('size') - if size is None: - logging.error(f"[{name}] No 'size' in parameters: {params}. Skipping.") - continue - leverage_long = int(params.get('leverage_long', 2)) - leverage_short = int(params.get('leverage_short', 2)) - current_position = self.managed_positions.get(name) + agent_name = order['agent'] + action = order['action'] + coin = order['coin'] + is_buy = order['is_buy'] + size = order['size'] + limit_px = order.get('limit_px') - agent_name = (config.get("agent") or "default").lower() exchange_to_use = self.exchanges.get(agent_name) if not exchange_to_use: - logging.error(f"[{name}] Agent '{agent_name}' not found. Available agents: {list(self.exchanges.keys())}. Skipping trade.") + logging.error(f"Agent '{agent_name}' not found. 
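An illustrative order message matching the keys unpacked above; note that the state-update path further down also reads position_key, strategy, open_time_utc, open_price and amount for market_open orders. All values here are examples:

example_order = {
    "agent": "default",
    "action": "market_open",    # or "market_close" / "update_leverage"
    "coin": "ETH",
    "is_buy": True,
    "size": 0.01,
    "limit_px": 3000.0,         # raw signal price, sent as an IOC limit order
}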
Skipping order.") continue - - # --- State Machine Logic (now runs instantly on signal) --- - if desired_signal == "BUY" or desired_signal == "INIT_BUY": - if not current_position: - logging.warning(f"[{name}] ACTION: Setting leverage to {leverage_long}x and opening LONG for {coin}.") - exchange_to_use.update_leverage(leverage_long, coin) - exchange_to_use.market_open(coin, True, size, None, 0.01) - self.managed_positions[name] = {"coin": coin, "side": "long", "size": size} - log_trade(strategy=name, coin=coin, action="OPEN_LONG", price=status.get('signal_price', 0), size=size, signal=desired_signal) - elif current_position['side'] == 'short': - logging.warning(f"[{name}] ACTION: Closing SHORT and opening LONG for {coin} with {leverage_long}x leverage.") - exchange_to_use.update_leverage(leverage_long, coin) - # 1. Close the short by buying back (this is a market_open, but is_buy=True) - exchange_to_use.market_open(coin, True, current_position['size'], None, 0.01) - log_trade(strategy=name, coin=coin, action="CLOSE_SHORT", price=status.get('signal_price', 0), size=current_position['size'], signal=desired_signal) - # 2. Open the new long - exchange_to_use.market_open(coin, True, size, None, 0.01) - self.managed_positions[name] = {"coin": coin, "side": "long", "size": size} - log_trade(strategy=name, coin=coin, action="OPEN_LONG", price=status.get('signal_price', 0), size=size, signal=desired_signal) - - elif desired_signal == "SELL" or desired_signal == "INIT_SELL": - if not current_position: - logging.warning(f"[{name}] ACTION: Setting leverage to {leverage_short}x and opening SHORT for {coin}.") - exchange_to_use.update_leverage(leverage_short, coin) - exchange_to_use.market_open(coin, False, size, None, 0.01) - self.managed_positions[name] = {"coin": coin, "side": "short", "size": size} - log_trade(strategy=name, coin=coin, action="OPEN_SHORT", price=status.get('signal_price', 0), size=size, signal=desired_signal) - elif current_position['side'] == 'long': - logging.warning(f"[{name}] ACTION: Closing LONG and opening SHORT for {coin} with {leverage_short}x leverage.") - exchange_to_use.update_leverage(leverage_short, coin) - # 1. Close the long by selling - exchange_to_use.market_open(coin, False, current_position['size'], None, 0.01) - log_trade(strategy=name, coin=coin, action="CLOSE_LONG", price=status.get('signal_price', 0), size=current_position['size'], signal=desired_signal) - # 2. 
Open the new short - exchange_to_use.market_open(coin, False, size, None, 0.01) - self.managed_positions[name] = {"coin": coin, "side": "short", "size": size} - # --- FIX: Corrected typo from 'signal.desired_signal' to 'signal=desired_signal' --- - log_trade(strategy=name, coin=coin, action="OPEN_SHORT", price=status.get('signal_price', 0), size=size, signal=desired_signal) - elif desired_signal == "FLAT": - if current_position: - logging.warning(f"[{name}] ACTION: Close {current_position['side']} position for {coin}.") - is_buy = current_position['side'] == 'short' - exchange_to_use.market_open(coin, is_buy, current_position['size'], None, 0.01) - del self.managed_positions[name] - log_trade(strategy=name, coin=coin, action=f"CLOSE_{current_position['side'].upper()}", price=status.get('signal_price', 0), size=current_position['size'], signal=desired_signal) + response = None - self._save_managed_positions() + if action == "market_open" or action == "market_close": + reduce_only = (action == "market_close") + log_action = "MARKET CLOSE" if reduce_only else "MARKET OPEN" + logging.warning(f"ACTION: {log_action} {coin} {'BUY' if is_buy else 'SELL'} {size}") + + # --- REVERTED: Removed all slippage and rounding logic --- + # The raw limit_px from the order is now used directly + final_price = limit_px + logging.info(f"[{agent_name}] Using raw price for {coin}: {final_price}") + + order_type = {"limit": {"tif": "Ioc"}} + # --- REVERTED: Uses final_price (which is just limit_px) --- + response = exchange_to_use.order(coin, is_buy, size, final_price, order_type, reduce_only=reduce_only) + logging.info(f"Market order response: {response}") + + # --- NEW: STATE UPDATE ON SUCCESS --- + if response.get("status") == "ok": + response_data = response.get("response", {}).get("data", {}) + if response_data and "statuses" in response_data: + # Check if the order status contains an error + if "error" not in response_data["statuses"][0]: + position_key = order['position_key'] + if action == "market_open": + # Add to state + self.opened_positions[position_key] = { + "strategy": order['strategy'], + "coin": coin, + "side": "long" if is_buy else "short", + "open_time_utc": order['open_time_utc'], + "open_price": order['open_price'], + "amount": order['amount'] + } + logging.info(f"Successfully opened position {position_key}. Saving state.") + elif action == "market_close": + # Remove from state + if position_key in self.opened_positions: + del self.opened_positions[position_key] + logging.info(f"Successfully closed position {position_key}. 
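The success check above drills into response["response"]["data"]["statuses"][0]. Roughly, it distinguishes the following two shapes, inferred from the keys the code reads rather than from an authoritative API schema:

# Illustrative only: shapes implied by the parsing logic above.
# The success payload contents ("filled", "totalSz", "avgPx") are an assumption.
ok_response = {
    "status": "ok",
    "response": {"data": {"statuses": [{"filled": {"totalSz": "0.01", "avgPx": "3000.1"}}]}},
}
rejected_response = {
    "status": "ok",
    "response": {"data": {"statuses": [{"error": "Order has invalid size."}]}},
}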
Saving state.") + else: + logging.warning(f"Received close confirmation for {position_key}, but it was not in state.") + + self._save_opened_positions() # Save state to disk + + else: + logging.error(f"API Error for {action}: {response_data['statuses'][0]['error']}") + else: + logging.error(f"Unexpected API response format: {response}") + else: + logging.error(f"API call failed, status: {response.get('status')}") + + + elif action == "update_leverage": + leverage = int(size) + logging.warning(f"ACTION: UPDATE LEVERAGE {coin} to {leverage}x") + response = exchange_to_use.update_leverage(leverage, coin) + logging.info(f"Update leverage response: {response}") + + else: + logging.warning(f"Received unknown action: {action}") except Exception as e: logging.error(f"An error occurred in the main executor loop: {e}", exc_info=True) time.sleep(1) -# This script is no longer run directly, but is called by main_app.py - From 5f9109c3a9b894aa194d54f492f7adaae582a913 Mon Sep 17 00:00:00 2001 From: DiTus Date: Sun, 2 Nov 2025 22:38:31 +0100 Subject: [PATCH 12/18] size taken from monitored wallet --- position_manager.py | 176 +++++++++--------- strategies/copy_trader_strategy.py | 280 +++++++++++++++++------------ trade_executor.py | 6 +- 3 files changed, 261 insertions(+), 201 deletions(-) diff --git a/position_manager.py b/position_manager.py index 8533564..c355c29 100644 --- a/position_manager.py +++ b/position_manager.py @@ -5,16 +5,17 @@ import json import time import multiprocessing import numpy as np # Import numpy to handle np.float64 -from datetime import datetime, timezone from logging_utils import setup_logging from trade_log import log_trade class PositionManager: """ - Listens for strategy signals, READS the current position state, - and sends explicit execution orders to the TradeExecutor. - It does NOT write to the position state file. + (Stateless) Listens for EXPLICIT signals (e.g., "OPEN_LONG") from all + strategies and converts them into specific execution orders + (e.g., "market_open") for the TradeExecutor. + + It holds NO position state. """ def __init__(self, log_level: str, trade_signal_queue: multiprocessing.Queue, order_execution_queue: multiprocessing.Queue): @@ -23,39 +24,34 @@ class PositionManager: self.trade_signal_queue = trade_signal_queue self.order_execution_queue = order_execution_queue - self.opened_positions_file = os.path.join("_data", "opened_positions.json") + # --- REMOVED: All state management --- - # --- MODIFIED: Load state, but will not save it --- - self.opened_positions = self._load_opened_positions() - if self.opened_positions: - logging.info(f"Position Manager started. Loaded {len(self.opened_positions)} open positions (read-only).") - else: - logging.info("Position Manager started. No initial positions found.") + logging.info("Position Manager (Stateless) started.") + # --- REMOVED: _load_managed_positions method --- + # --- REMOVED: _save_managed_positions method --- + # --- REMOVED: All tick/rounding/meta logic --- - def _load_opened_positions(self) -> dict: - """Loads the state of currently managed positions from a JSON file.""" - if not os.path.exists(self.opened_positions_file): - return {} - try: - with open(self.opened_positions_file, 'r', encoding='utf-8') as f: - return json.load(f) - except (json.JSONDecodeError, IOError) as e: - logging.error(f"Failed to read '{self.opened_positions_file}': {e}. 
Starting with empty state.", exc_info=True) - return {} - - # --- REMOVED: _save_opened_positions method --- - # (The TradeExecutor is now responsible for saving) - - def send_order(self, order_data: dict): + def send_order(self, agent: str, action: str, coin: str, is_buy: bool, size: float, reduce_only: bool = False, limit_px=None, sl_px=None, tp_px=None): """Helper function to put a standardized order onto the execution queue.""" + order_data = { + "agent": agent, + "action": action, + "coin": coin, + "is_buy": is_buy, + "size": size, + "reduce_only": reduce_only, + "limit_px": limit_px, + "sl_px": sl_px, + "tp_px": tp_px, + } logging.info(f"Sending order to executor: {order_data}") self.order_execution_queue.put(order_data) def run(self): """ Main execution loop. Blocks and waits for a signal from the queue. - Converts strategy signals into execution orders based on current state. + Converts explicit strategy signals into execution orders. """ logging.info("Position Manager started. Waiting for signals...") while True: @@ -66,103 +62,109 @@ class PositionManager: logging.info(f"Received signal: {trade_signal}") - # --- NEW: Reload the position state on every signal --- - # This ensures we have the most up-to-date state from the Executor - self.opened_positions = self._load_opened_positions() - name = trade_signal['strategy_name'] config = trade_signal['config'] params = config['parameters'] coin = trade_signal['coin'].upper() + + # --- NEW: The signal is now the explicit action --- desired_signal = trade_signal['signal'] - signal_price = trade_signal.get('signal_price') + status = trade_signal + + signal_price = status.get('signal_price') if isinstance(signal_price, np.float64): signal_price = float(signal_price) if not signal_price or signal_price <= 0: - logging.warning(f"[{name}] Signal received with invalid price ({signal_price}). Skipping.") + logging.warning(f"[{name}] Signal received with invalid or missing price ({signal_price}). Skipping.") continue - # --- Handle copy_trader's nested config --- + # --- This logic is still needed for copy_trader's nested config --- + # --- But ONLY for finding leverage, not size --- if 'coins_to_copy' in params: - # ... (omitted for brevity, this logic is correct and unchanged) ... - matching_coin_key = next((k for k in params['coins_to_copy'] if k.upper() == coin), None) + logging.info(f"[{name}] Detected 'coins_to_copy'. Entering copy_trader logic...") + matching_coin_key = None + for key in params['coins_to_copy'].keys(): + if key.upper() == coin: + matching_coin_key = key + break + if matching_coin_key: - coin_config = params['coins_to_copy'][matching_coin_key] - params['size'] = coin_config.get('size') - params['leverage_long'] = coin_config.get('leverage_long', 2) - params['leverage_short'] = coin_config.get('leverage_short', 2) + coin_specific_config = params['coins_to_copy'][matching_coin_key] + else: + coin_specific_config = {} + + # --- REMOVED: size = coin_specific_config.get('size') --- + + params['leverage_long'] = coin_specific_config.get('leverage_long', 2) + params['leverage_short'] = coin_specific_config.get('leverage_short', 2) - size = params.get('size') - if not size: - logging.error(f"[{name}] Signal received with no 'size'. Skipping trade.") + # --- FIX: Read the size from the ROOT of the trade signal --- + size = trade_signal.get('size') + if not size or size <= 0: + logging.error(f"[{name}] Signal received with no 'size' or invalid size ({size}). 
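For context, a hypothetical copy-trader entry in _data/strategies.json, written here as the equivalent Python literal. The field names follow the parameters read above; the address and leverage values are made up, and per this patch there is deliberately no per-coin size, since size now comes from the monitored wallet's fills:

example_copy_trader_params = {
    "agent": "default",
    "target_address": "0x0000000000000000000000000000000000000000",  # placeholder
    "coins_to_copy": {
        "ETH": {"leverage_long": 3, "leverage_short": 2},
        "BTC": {"leverage_long": 2, "leverage_short": 2},
    },
}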
Skipping trade.") continue + # --- END FIX --- leverage_long = int(params.get('leverage_long', 2)) leverage_short = int(params.get('leverage_short', 2)) + agent_name = (config.get("agent") or "default").lower() - # --- NEW: Stateful decision making --- - position_key = f"{name}_{coin}" - current_position = self.opened_positions.get(position_key) + logging.info(f"[{name}] Agent set to: {agent_name}") - logging.info(f"[{name}] Processing signal '{desired_signal}'. Current state: {current_position['side'] if current_position else 'FLAT'}") - - order_data = { - "agent": agent_name, - "coin": coin, - "limit_px": signal_price, - # --- NEW: Pass all context to the executor --- - "strategy": name, - "position_key": position_key, - "open_price": signal_price, - "open_time_utc": datetime.now(timezone.utc).isoformat(), - "amount": size - } + # --- REMOVED: current_position check --- + + # --- Use pure signal_price directly for the limit_px --- + limit_px = signal_price + logging.info(f"[{name}] Using pure signal price for limit_px: {limit_px}") + # --- NEW: Stateless Signal-to-Order Conversion --- + if desired_signal == "OPEN_LONG": - if current_position: - logging.info(f"[{name}] Ignoring OPEN_LONG signal, already in a position.") - continue - - logging.warning(f"[{name}] ACTION: Setting leverage to {leverage_long}x and opening LONG.") - self.send_order({**order_data, "action": "update_leverage", "is_buy": True, "size": leverage_long}) - self.send_order({**order_data, "action": "market_open", "is_buy": True, "size": size}) + logging.warning(f"[{name}] ACTION: Opening LONG for {coin}.") + # --- REMOVED: Leverage update signal --- + self.send_order(agent_name, "market_open", coin, True, size, limit_px=limit_px) log_trade(strategy=name, coin=coin, action="OPEN_LONG", price=signal_price, size=size, signal=desired_signal) - + elif desired_signal == "OPEN_SHORT": - if current_position: - logging.info(f"[{name}] Ignoring OPEN_SHORT signal, already in a position.") - continue - - logging.warning(f"[{name}] ACTION: Setting leverage to {leverage_short}x and opening SHORT.") - self.send_order({**order_data, "action": "update_leverage", "is_buy": False, "size": leverage_short}) - self.send_order({**order_data, "action": "market_open", "is_buy": False, "size": size}) + logging.warning(f"[{name}] ACTION: Opening SHORT for {coin}.") + # --- REMOVED: Leverage update signal --- + self.send_order(agent_name, "market_open", coin, False, size, limit_px=limit_px) log_trade(strategy=name, coin=coin, action="OPEN_SHORT", price=signal_price, size=size, signal=desired_signal) elif desired_signal == "CLOSE_LONG": - if not current_position or current_position['side'] != 'long': - logging.info(f"[{name}] Ignoring CLOSE_LONG signal, not in a long position.") - continue - - logging.warning(f"[{name}] ACTION: Closing LONG position.") - self.send_order({**order_data, "action": "market_close", "is_buy": False, "size": size}) + logging.warning(f"[{name}] ACTION: Closing LONG position for {coin}.") + # A "market_close" for a LONG is a SELL order + self.send_order(agent_name, "market_close", coin, False, size, limit_px=limit_px) log_trade(strategy=name, coin=coin, action="CLOSE_LONG", price=signal_price, size=size, signal=desired_signal) elif desired_signal == "CLOSE_SHORT": - if not current_position or current_position['side'] != 'short': - logging.info(f"[{name}] Ignoring CLOSE_SHORT signal, not in a short position.") - continue - - logging.warning(f"[{name}] ACTION: Closing SHORT position.") - self.send_order({**order_data, 
"action": "market_close", "is_buy": True, "size": size}) + logging.warning(f"[{name}] ACTION: Closing SHORT position for {coin}.") + # A "market_close" for a SHORT is a BUY order + self.send_order(agent_name, "market_close", coin, True, size, limit_px=limit_px) log_trade(strategy=name, coin=coin, action="CLOSE_SHORT", price=signal_price, size=size, signal=desired_signal) + # --- NEW: Handle leverage update signals --- + elif desired_signal == "UPDATE_LEVERAGE_LONG": + logging.warning(f"[{name}] ACTION: Updating LONG leverage for {coin} to {size}x") + # 'size' field holds the leverage value for this signal + self.send_order(agent_name, "update_leverage", coin, True, size) + + elif desired_signal == "UPDATE_LEVERAGE_SHORT": + logging.warning(f"[{name}] ACTION: Updating SHORT leverage for {coin} to {size}x") + # 'size' field holds the leverage value for this signal + self.send_order(agent_name, "update_leverage", coin, False, size) + else: - logging.warning(f"[{name}] Received unhandled signal '{desired_signal}'. No action taken.") + logging.warning(f"[{name}] Received unknown signal '{desired_signal}'. No action taken.") + + # --- REMOVED: _save_managed_positions() --- except Exception as e: logging.error(f"An error occurred in the position manager loop: {e}", exc_info=True) time.sleep(1) +# This script is no longer run directly, but is called by main_app.py + diff --git a/strategies/copy_trader_strategy.py b/strategies/copy_trader_strategy.py index e2aaac4..95443bb 100644 --- a/strategies/copy_trader_strategy.py +++ b/strategies/copy_trader_strategy.py @@ -11,12 +11,16 @@ from strategies.base_strategy import BaseStrategy class CopyTraderStrategy(BaseStrategy): """ An event-driven strategy that monitors a target wallet address and - copies its trades for a specific set of allowed coins, using - per-coin size and leverage settings. + copies its trades for a specific set of allowed coins. - This strategy is STATEFUL and tracks its own positions. + This strategy is STATELESS. It translates a target's fill direction + (e.g., "Open Long") directly into an explicit signal + (e.g., "OPEN_LONG") for the PositionManager. """ def __init__(self, strategy_name: str, params: dict, trade_signal_queue, shared_status: dict = None): + # --- MODIFIED: Pass the correct queue to the parent --- + # The event-driven copy trader should send orders to the order_execution_queue + # We will assume the queue passed in is the correct one (as setup in main_app.py) super().__init__(strategy_name, params, trade_signal_queue, shared_status) self.target_address = self.params.get("target_address", "").lower() @@ -33,9 +37,9 @@ class CopyTraderStrategy(BaseStrategy): self.info = None # Will be initialized in the run loop - # --- MODIFIED: Load and manage its own position state --- - self.position_state_file = os.path.join("_data", f"strategy_state_{self.strategy_name}.json") - self.current_positions = self._load_position_state() + # --- REMOVED: All local state management --- + # self.position_state_file = ... + # self.current_positions = ... # --- MODIFIED: Check if shared_status is None before using it --- if self.shared_status is None: @@ -48,26 +52,9 @@ class CopyTraderStrategy(BaseStrategy): self.start_time_utc = datetime.now(timezone.utc) logging.info(f"Strategy initialized. 
Ignoring all trades before {self.start_time_utc.isoformat()}") - logging.info(f"Loaded positions: {self.current_positions}") - def _load_position_state(self) -> dict: - """Loads the strategy's current open positions from a file.""" - if os.path.exists(self.position_state_file): - try: - with open(self.position_state_file, 'r') as f: - logging.info(f"Loading existing position state from {self.position_state_file}") - return json.load(f) - except (IOError, json.JSONDecodeError): - logging.warning(f"Could not read position state file {self.position_state_file}. Starting fresh.") - return {} # { "ETH": {"side": "long", "size": 0.01, "entry": 3000}, ... } - - def _save_position_state(self): - """Saves the strategy's current open positions to a file.""" - try: - with open(self.position_state_file, 'w') as f: - json.dump(self.current_positions, f, indent=4) - except IOError as e: - logging.error(f"Failed to save position state: {e}") + # --- REMOVED: _load_position_state --- + # --- REMOVED: _save_position_state --- def calculate_signals(self, df): # This strategy is event-driven, so it does not use polling-based signal calculation. @@ -76,17 +63,19 @@ class CopyTraderStrategy(BaseStrategy): def send_explicit_signal(self, signal: str, coin: str, price: float, trade_params: dict, size: float): """Helper to send a formatted signal to the PositionManager.""" config = { + # --- MODIFIED: Ensure agent is read from params --- "agent": self.params.get("agent"), "parameters": trade_params } + # --- MODIFIED: Use self.trade_signal_queue (which is the queue passed in) --- self.trade_signal_queue.put({ "strategy_name": self.strategy_name, "signal": signal, # e.g., "OPEN_LONG", "CLOSE_SHORT" "coin": coin, "signal_price": price, "config": config, - "size": size # Explicitly pass size + "size": size # Explicitly pass size (or leverage for leverage updates) }) logging.info(f"Explicit signal SENT: {signal} {coin} @ {price}, Size: {size}") @@ -96,23 +85,45 @@ class CopyTraderStrategy(BaseStrategy): every time the monitored address has an event. """ try: + # --- NEW: Add logging to see ALL messages --- + logging.debug(f"Received WebSocket message: {message}") + channel = message.get("channel") if channel not in ("user", "userFills", "userEvents"): + # --- NEW: Added debug logging --- + logging.debug(f"Ignoring message from unhandled channel: {channel}") return data = message.get("data") if not data: + # --- NEW: Added debug logging --- + logging.debug("Message received with no 'data' field. Ignoring.") return - fills = data.get("fills", []) - if not fills: - return - + # --- NEW: Check for user address FIRST --- user_address = data.get("user", "").lower() - - if user_address != self.target_address: + if not user_address: + logging.debug("Received message with 'data' but no 'user'. Ignoring.") return + # --- MODIFIED: Check for 'fills' vs. other event types --- + # This check is still valid for userFills + if "fills" not in data or not data.get("fills"): + # This is a userEvent, but not a fill (e.g., order placement, cancel, withdrawal) + event_type = data.get("type") # e.g., 'order', 'cancel', 'withdrawal' + if event_type: + logging.debug(f"Received non-fill user event: '{event_type}'. Ignoring.") + else: + logging.debug(f"Received 'data' message with no 'fills'. 
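The guards above expect a userFills message of roughly the following shape, inferred from the fields the handler reads (this is not a complete schema, and the values are illustrative):

example_fill_message = {
    "channel": "userFills",
    "data": {
        "user": "0x0000000000000000000000000000000000000000",  # must equal target_address
        "fills": [
            {
                "coin": "ETH",
                "px": "3000.5",          # fill price, string-encoded
                "sz": "0.5",             # fill size, string-encoded
                "time": 1730000000000,   # epoch milliseconds
                "dir": "Open Long",      # or Close Long / Short > Long / ...
                "startPosition": "0.0",  # signed position size before this fill
            }
        ],
    },
}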
Ignoring.") + return + + # --- This line is now safe to run --- + if user_address != self.target_address: + # This shouldn't happen if the subscription is correct, but good to check + logging.warning(f"Received fill for wrong user: {user_address}") + return + + fills = data.get("fills") logging.debug(f"Received {len(fills)} fill(s) for user {user_address}") for fill in fills: @@ -125,71 +136,108 @@ class CopyTraderStrategy(BaseStrategy): coin = fill.get('coin').upper() if coin in self.allowed_coins: - side = fill.get('side') price = float(fill.get('px')) - fill_size = float(fill.get('sz')) - # Get our strategy's configured trade size for this coin - coin_config = self.coins_to_copy.get(coin) - if not coin_config or not coin_config.get("size"): - logging.warning(f"No trade size specified for {coin}. Ignoring fill.") + # --- MODIFIED: Use the target's fill size --- + fill_size = float(fill.get('sz')) # Target's size + + if fill_size == 0: + logging.warning(f"Ignoring fill with size 0.") continue - - strategy_trade_size = coin_config.get("size") + # --- NEW: Get the fill direction --- + # "dir": "Open Long", "Close Long", "Open Short", "Close Short" + fill_direction = fill.get("dir") + + # --- NEW: Get startPosition to calculate flip sizes --- + start_pos_size = float(fill.get('startPosition', 0.0)) + + if not fill_direction: + logging.warning(f"Fill message missing 'dir'. Ignoring fill: {fill}") + continue + + # Get our strategy's configured leverage for this coin + coin_config = self.coins_to_copy.get(coin) + + # --- REMOVED: Check for coin_config.get("size") --- + # --- REMOVED: strategy_trade_size = coin_config.get("size") --- + # Prepare config for the signal trade_params = self.params.copy() - trade_params.update(coin_config) + if coin_config: + trade_params.update(coin_config) - # Get our current position state for this coin - current_local_pos = self.current_positions.get(coin) - current_local_side = current_local_pos.get("side") if current_local_pos else None + # --- REMOVED: All stateful logic (current_local_pos, etc.) --- + # --- MODIFIED: Expanded logic to handle flip directions --- signal_sent = False - if side == "B": # Target bought - if current_local_side == "short": - # Flip: Close short, then open long - logging.warning(f"[{coin}] Target BOUGHT, we are SHORT. Flipping to LONG.") - self.send_explicit_signal("CLOSE_SHORT", coin, price, trade_params, current_local_pos.get("size")) - self.send_explicit_signal("OPEN_LONG", coin, price, trade_params, strategy_trade_size) - self.current_positions[coin] = {"side": "long", "size": strategy_trade_size, "entry": price} - signal_sent = True - elif current_local_side is None: - # New: Open long - logging.warning(f"[{coin}] Target BOUGHT, we are FLAT. Opening LONG.") - self.send_explicit_signal("OPEN_LONG", coin, price, trade_params, strategy_trade_size) - self.current_positions[coin] = {"side": "long", "size": strategy_trade_size, "entry": price} - signal_sent = True - else: # We are already long - logging.info(f"[{coin}] Target BOUGHT, we are already LONG. Ignoring.") - - elif side == "A": # Target sold - if current_local_side == "long": - # Flip: Close long, then open short - logging.warning(f"[{coin}] Target SOLD, we are LONG. 
Flipping to SHORT.") - self.send_explicit_signal("CLOSE_LONG", coin, price, trade_params, current_local_pos.get("size")) - self.send_explicit_signal("OPEN_SHORT", coin, price, trade_params, strategy_trade_size) - self.current_positions[coin] = {"side": "short", "size": strategy_trade_size, "entry": price} - signal_sent = True - elif current_local_side is None: - # New: Open short - logging.warning(f"[{coin}] Target SOLD, we are FLAT. Opening SHORT.") - self.send_explicit_signal("OPEN_SHORT", coin, price, trade_params, strategy_trade_size) - self.current_positions[coin] = {"side": "short", "size": strategy_trade_size, "entry": price} - signal_sent = True - else: # We are already short - logging.info(f"[{coin}] Target SOLD, we are already SHORT. Ignoring.") + dashboard_signal = "" + if fill_direction == "Open Long": + logging.warning(f"[{coin}] Target action: {fill_direction}. Sending signal: OPEN_LONG") + self.send_explicit_signal("OPEN_LONG", coin, price, trade_params, fill_size) + signal_sent = True + dashboard_signal = "OPEN_LONG" + + elif fill_direction == "Close Long": + logging.warning(f"[{coin}] Target action: {fill_direction}. Sending signal: CLOSE_LONG") + self.send_explicit_signal("CLOSE_LONG", coin, price, trade_params, fill_size) + signal_sent = True + dashboard_signal = "CLOSE_LONG" + + elif fill_direction == "Open Short": + logging.warning(f"[{coin}] Target action: {fill_direction}. Sending signal: OPEN_SHORT") + self.send_explicit_signal("OPEN_SHORT", coin, price, trade_params, fill_size) + signal_sent = True + dashboard_signal = "OPEN_SHORT" + + elif fill_direction == "Close Short": + logging.warning(f"[{coin}] Target action: {fill_direction}. Sending signal: CLOSE_SHORT") + self.send_explicit_signal("CLOSE_SHORT", coin, price, trade_params, fill_size) + signal_sent = True + dashboard_signal = "CLOSE_SHORT" + + elif fill_direction == "Short > Long": + logging.warning(f"[{coin}] Target action: {fill_direction}. Sending CLOSE_SHORT then OPEN_LONG.") + close_size = abs(start_pos_size) + open_size = fill_size - close_size + + if close_size > 0: + self.send_explicit_signal("CLOSE_SHORT", coin, price, trade_params, close_size) + + if open_size > 0: + self.send_explicit_signal("OPEN_LONG", coin, price, trade_params, open_size) + + signal_sent = True + dashboard_signal = "FLIP_TO_LONG" + + elif fill_direction == "Long > Short": + logging.warning(f"[{coin}] Target action: {fill_direction}. 
Sending CLOSE_LONG then OPEN_SHORT.") + close_size = abs(start_pos_size) + open_size = fill_size - close_size + + if close_size > 0: + self.send_explicit_signal("CLOSE_LONG", coin, price, trade_params, close_size) + + if open_size > 0: + self.send_explicit_signal("OPEN_SHORT", coin, price, trade_params, open_size) + + signal_sent = True + dashboard_signal = "FLIP_TO_SHORT" + + if signal_sent: # Update dashboard status - self.current_signal = f"{side} @ {coin}" + self.current_signal = dashboard_signal # Show the action self.signal_price = price self.last_signal_change_utc = trade_time.isoformat() - # --- MODIFIED: Save BOTH status files --- + self.coin = coin # Update coin for dashboard + self.size = fill_size # Update size for dashboard self._save_status() # For dashboard - self._save_position_state() # For our internal tracking logging.info(f"Source trade logged: {json.dumps(fill)}") + else: + logging.info(f"[{coin}] Ignoring unhandled fill direction: {fill_direction}") else: logging.info(f"Ignoring fill for unmonitored coin: {coin}") @@ -203,9 +251,12 @@ class CopyTraderStrategy(BaseStrategy): try: logging.info("Connecting to Hyperliquid WebSocket...") self.info = Info(constants.MAINNET_API_URL, skip_ws=False) + + # --- MODIFIED: Reverted to 'userFills' as requested --- subscription = {"type": "userFills", "user": self.target_address} self.info.subscribe(subscription, self.on_fill_message) logging.info(f"Subscribed to 'userFills' for target address: {self.target_address}") + return True except Exception as e: logging.error(f"Failed to connect or subscribe: {e}") @@ -217,8 +268,6 @@ class CopyTraderStrategy(BaseStrategy): This method overrides the default polling loop. It establishes a persistent WebSocket connection and runs a watchdog to ensure it stays connected. - - It also catches KeyboardInterrupt to gracefully shut down positions. """ try: if not self._connect_and_subscribe(): @@ -226,6 +275,40 @@ class CopyTraderStrategy(BaseStrategy): time.sleep(60) return + # --- MODIFIED: Add a small delay to ensure Info object is ready for REST calls --- + logging.info("Connection established. 
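A worked example of the flip arithmetic used in the two branches above: suppose the target was short 2 ETH (startPosition = -2.0) and a "Short > Long" fill of sz = 5.0 arrives.

start_pos_size = -2.0
fill_size = 5.0
close_size = abs(start_pos_size)    # 2.0 -> CLOSE_SHORT for 2 ETH
open_size = fill_size - close_size  # 3.0 -> OPEN_LONG for the remaining 3 ETH
assert (close_size, open_size) == (2.0, 3.0)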
Waiting 2 seconds for Info client to be ready...") + time.sleep(2) + # --- END MODIFICATION --- + + # --- NEW: Set initial leverage for all monitored coins --- + logging.info("Setting initial leverage for all monitored coins...") + try: + all_mids = self.info.all_mids() + for coin_key, coin_config in self.coins_to_copy.items(): + coin = coin_key.upper() + # Use a failsafe price of 1.0 if coin not in mids (e.g., new listing) + current_price = float(all_mids.get(coin, 1.0)) + + leverage_long = coin_config.get('leverage_long', 2) + leverage_short = coin_config.get('leverage_short', 2) + + # Prepare config for the signal + trade_params = self.params.copy() + trade_params.update(coin_config) + + # Send LONG leverage update + # The 'size' param is used to pass the leverage value for this signal type + self.send_explicit_signal("UPDATE_LEVERAGE_LONG", coin, current_price, trade_params, leverage_long) + + # Send SHORT leverage update + self.send_explicit_signal("UPDATE_LEVERAGE_SHORT", coin, current_price, trade_params, leverage_short) + + logging.info(f"Sent initial leverage signals for {coin} (Long: {leverage_long}x, Short: {leverage_short}x)") + + except Exception as e: + logging.error(f"Failed to set initial leverage: {e}", exc_info=True) + # --- END NEW LEVERAGE LOGIC --- + # Save the initial "WAIT" status self._save_status() @@ -253,36 +336,9 @@ class CopyTraderStrategy(BaseStrategy): except Exception as e: logging.error(f"An error occurred in the watchdog loop: {e}", exc_info=True) - except KeyboardInterrupt: # --- THIS IS THE GRACEFUL SHUTDOWN LOGIC --- - logging.warning(f"Shutdown signal received. Closing all open positions for '{self.strategy_name}'...") - - # Use a copy of the items to avoid runtime modification errors - for coin, position in list(self.current_positions.items()): - current_side = position.get("side") - trade_size = position.get("size") - - if not current_side or not trade_size: - continue - - # Find the config for this coin - coin_config = self.coins_to_copy.get(coin.upper(), {}) - trade_params = self.params.copy() - trade_params.update(coin_config) - - # Use the last entry price as a placeholder for the market close order - price = position.get("entry", 1) # Use 1 as a failsafe - - if current_side == "long": - logging.warning(f"Sending CLOSE_LONG for {coin}, {price}, {trade_size}...") - #self.send_explicit_signal("CLOSE_LONG", coin, price, trade_params, trade_size) - #del self.current_positions[coin] # Assume it will close - elif current_side == "short": - logging.warning(f"Sending CLOSE_SHORT for {coin}, {price}, {trade_size} ...") - #self.send_explicit_signal("CLOSE_SHORT", coin, price, trade_params, trade_size) - #del self.current_positions[coin] # Assume it will close - - self._save_position_state() # Save the new empty state - logging.info("All closing signals sent. Exiting strategy.") + except KeyboardInterrupt: + # --- MODIFIED: No positions to close, just exit --- + logging.warning(f"Shutdown signal received. 
Exiting strategy '{self.strategy_name}'.") except Exception as e: logging.error(f"An unhandled error occurred in run_event_loop: {e}", exc_info=True) diff --git a/trade_executor.py b/trade_executor.py index 2de81c7..44c6e13 100644 --- a/trade_executor.py +++ b/trade_executor.py @@ -142,7 +142,7 @@ class TradeExecutor: # --- NEW: STATE UPDATE ON SUCCESS --- if response.get("status") == "ok": - response_data = response.get("response", {}).get("data", {}) + response_data = response.get("response", {},).get("data", {}) if response_data and "statuses" in response_data: # Check if the order status contains an error if "error" not in response_data["statuses"][0]: @@ -155,7 +155,9 @@ class TradeExecutor: "side": "long" if is_buy else "short", "open_time_utc": order['open_time_utc'], "open_price": order['open_price'], - "amount": order['amount'] + "amount": order['amount'], + # --- MODIFIED: Read leverage from the order --- + "leverage": order.get('leverage') } logging.info(f"Successfully opened position {position_key}. Saving state.") elif action == "market_close": From 596fcde0bf08057d897631628ccb0f5d7b33be66 Mon Sep 17 00:00:00 2001 From: DiTus Date: Tue, 4 Nov 2025 13:34:49 +0100 Subject: [PATCH 13/18] bid, ask, last traded price --- live_market_utils.py | 131 ++++++++++++++++++++++------ main_app.py | 198 +++++++++++++++++++++++-------------------- 2 files changed, 214 insertions(+), 115 deletions(-) diff --git a/live_market_utils.py b/live_market_utils.py index ae0eab3..bab9129 100644 --- a/live_market_utils.py +++ b/live_market_utils.py @@ -3,6 +3,7 @@ import json import time import os import traceback +import sys from hyperliquid.info import Info from hyperliquid.utils import constants @@ -28,38 +29,114 @@ def log_error(error_message: str, include_traceback: bool = True): except Exception: print(f"CRITICAL: Failed to write to error log file: {error_message}", file=sys.stderr) + def on_message(message, shared_prices_dict): """ - Callback function to process incoming 'allMids' messages and update the - shared memory dictionary directly. + Callback function to process incoming WebSocket messages for 'bbo' and 'trades' + and update the shared memory dictionary. 
""" try: - if message.get("channel") == "allMids": - new_prices = message.get("data", {}).get("mids", {}) - shared_prices_dict.update(new_prices) + logging.debug(f"Received WebSocket message: {message}") + channel = message.get("channel") + + # --- Parser 1: Handle Best Bid/Offer messages --- + if channel == "bbo": + data = message.get("data") + if not data: + logging.warning("BBO message received with no data.") + return + + coin = data.get("coin") + if not coin: + logging.warning("BBO data received with no coin identifier.") + return + + bid_ask_data = data.get("bbo") + + if not bid_ask_data or not isinstance(bid_ask_data, list) or len(bid_ask_data) < 2: + logging.warning(f"[{coin}] Received BBO message with invalid 'bbo' array: {bid_ask_data}") + return + + try: + bid_price_str = bid_ask_data[0].get('px') + ask_price_str = bid_ask_data[1].get('px') + + if not bid_price_str or not ask_price_str: + logging.warning(f"[{coin}] BBO data missing 'px' field.") + return + + bid_price = float(bid_price_str) + ask_price = float(ask_price_str) + + # Update the shared dictionary for Bid and Ask + shared_prices_dict[f"{coin}_bid"] = bid_price + shared_prices_dict[f"{coin}_ask"] = ask_price + + logging.info(f"Updated {coin} (BBO): Bid={bid_price:.4f}, Ask={ask_price:.4f}") + + except (ValueError, TypeError, IndexError) as e: + logging.error(f"[{coin}] Error parsing BBO data: {e}. Data: {bid_ask_data}") + + # --- Parser 2: Handle Live Trade messages --- + elif channel == "trades": + trade_list = message.get("data") + + if not trade_list or not isinstance(trade_list, list) or len(trade_list) == 0: + logging.warning(f"Received 'trades' message with invalid data: {trade_list}") + return + + # Process all trades in the batch + for trade in trade_list: + try: + coin = trade.get("coin") + price_str = trade.get("px") + + if not coin or not price_str: + logging.warning(f"Trade data missing 'coin' or 'px': {trade}") + continue + + price = float(price_str) + + # Update the shared dictionary for the "Live Price" column + shared_prices_dict[coin] = price + + logging.info(f"Updated {coin} (Live Price) to last trade: {price:.4f}") + + except (ValueError, TypeError) as e: + logging.error(f"Error parsing trade data: {e}. Data: {trade}") + except Exception as e: log_error(f"Error in WebSocket on_message: {e}") -def start_live_feed(shared_prices_dict, log_level='off'): +def start_live_feed(shared_prices_dict, coins_to_watch: list, log_level='off'): """ - Main function for the WebSocket process. It takes a shared dictionary - and continuously feeds it with live market data. - Includes a watchdog to auto-reconnect on failure. + Main function for the WebSocket process. + Subscribes to BOTH 'bbo' and 'trades' for all watched coins. 
""" - setup_logging(log_level, 'LiveMarketFeed') + setup_logging(log_level, 'LiveMarketFeed_Combined') info = None callback = lambda msg: on_message(msg, shared_prices_dict) def connect_and_subscribe(): - """Establishes a new WebSocket connection and subscribes to allMids.""" + """Establishes a new WebSocket connection and subscribes to both streams.""" try: logging.info("Connecting to Hyperliquid WebSocket...") - # Ensure skip_ws=False to create the ws_manager new_info = Info(constants.MAINNET_API_URL, skip_ws=False) - subscription = {"type": "allMids"} - new_info.subscribe(subscription, callback) - logging.info("WebSocket connected and subscribed to 'allMids'.") + + # --- MODIFIED: Subscribe to 'bbo' AND 'trades' for each coin --- + for coin in coins_to_watch: + # Subscribe to Best Bid/Offer + bbo_sub = {"type": "bbo", "coin": coin} + new_info.subscribe(bbo_sub, callback) + logging.info(f"Subscribed to 'bbo' for {coin}.") + + # Subscribe to Live Trades + trades_sub = {"type": "trades", "coin": coin} + new_info.subscribe(trades_sub, callback) + logging.info(f"Subscribed to 'trades' for {coin}.") + + logging.info("WebSocket connected and all subscriptions sent.") return new_info except Exception as e: log_error(f"Failed to connect to WebSocket: {e}") @@ -67,24 +144,28 @@ def start_live_feed(shared_prices_dict, log_level='off'): info = connect_and_subscribe() - logging.info("Starting live price feed process. Press Ctrl+C in main app to stop.") + if info is None: + logging.critical("Initial WebSocket connection failed. Exiting process.") + log_error("Initial WebSocket connection failed. Exiting process.", include_traceback=False) + time.sleep(10) # Wait before letting the process manager restart it + return + + logging.info("Starting Combined (BBO + Trades) live price feed process.") try: while True: # --- Watchdog Logic --- time.sleep(15) # Check the connection every 15 seconds - # --- FIX: Changed 'is_running()' to the correct method 'is_alive()' --- - if info is None or not info.ws_manager.is_alive(): - error_msg = "WebSocket connection lost or not running. Attempting to reconnect..." + if not info.ws_manager.is_alive(): + error_msg = "WebSocket connection lost. Attempting to reconnect..." 
logging.warning(error_msg) log_error(error_msg, include_traceback=False) # Log it to the file - if info and info.ws_manager: # Check if ws_manager exists before stopping - try: - info.ws_manager.stop() # Clean up old manager - except Exception as e: - log_error(f"Error stopping old ws_manager: {e}") + try: + info.ws_manager.stop() # Clean up old manager + except Exception as e: + log_error(f"Error stopping old ws_manager: {e}") info = connect_and_subscribe() @@ -102,5 +183,5 @@ def start_live_feed(shared_prices_dict, log_level='off'): finally: if info and info.ws_manager: info.ws_manager.stop() - logging.info("Listener stopped.") + logging.info("Combined Listener stopped.") diff --git a/main_app.py b/main_app.py index f64f7ce..345013d 100644 --- a/main_app.py +++ b/main_app.py @@ -23,12 +23,12 @@ from strategies.base_strategy import BaseStrategy WATCHED_COINS = ["BTC", "ETH", "SOL", "BNB", "HYPE", "ASTER", "ZEC", "PUMP", "SUI"] LIVE_CANDLE_FETCHER_SCRIPT = "live_candle_fetcher.py" RESAMPLER_SCRIPT = "resampler.py" -MARKET_CAP_FETCHER_SCRIPT = "market_cap_fetcher.py" +# --- REMOVED: Market Cap Fetcher --- # --- REMOVED: trade_executor.py is no longer a script --- DASHBOARD_DATA_FETCHER_SCRIPT = "dashboard_data_fetcher.py" STRATEGY_CONFIG_FILE = os.path.join("_data", "strategies.json") DB_PATH = os.path.join("_data", "market_data.db") -MARKET_CAP_SUMMARY_FILE = os.path.join("_data", "market_cap_data.json") +# --- REMOVED: Market Cap File --- LOGS_DIR = "_logs" TRADE_EXECUTOR_STATUS_FILE = os.path.join(LOGS_DIR, "trade_executor_status.json") @@ -145,51 +145,9 @@ def resampler_scheduler(timeframes_to_generate: list): logging.info("ResamplerScheduler shutting down.") -def run_market_cap_fetcher_job(): - """Defines the job for the market cap fetcher, redirecting output.""" - log_file = os.path.join(LOGS_DIR, "market_cap_fetcher.log") - try: - command = [sys.executable, MARKET_CAP_FETCHER_SCRIPT, "--log-level", "off"] - with open(log_file, 'a') as f: - f.write(f"\n--- Starting {MARKET_CAP_FETCHER_SCRIPT} job at {datetime.now()} ---\n") - subprocess.run(command, check=True, stdout=f, stderr=subprocess.STDOUT) - except Exception as e: - with open(log_file, 'a') as f: - f.write(f"\n--- SCHEDULER ERROR at {datetime.now()} ---\n") - f.write(f"Failed to run {MARKET_CAP_FETCHER_SCRIPT} job: {e}\n") +# --- REMOVED: run_market_cap_fetcher_job function --- - -def market_cap_fetcher_scheduler(): - """Schedules the market_cap_fetcher.py script to run daily at a specific UTC time.""" - - # --- GRACEFUL SHUTDOWN HANDLER --- - import signal - shutdown_requested = False - - def handle_shutdown_signal(signum, frame): - nonlocal shutdown_requested - try: - logging.info(f"Shutdown signal ({signum}) received. Exiting loop...") - except NameError: - print(f"[MarketCapScheduler] Shutdown signal ({signum}) received. 
Exiting loop...") - shutdown_requested = True - - signal.signal(signal.SIGTERM, handle_shutdown_signal) - signal.signal(signal.SIGINT, handle_shutdown_signal) - # --- END GRACEFUL SHUTDOWN HANDLER --- - - setup_logging('off', 'MarketCapScheduler') - schedule.every().day.at("00:15", "UTC").do(run_market_cap_fetcher_job) - - while not shutdown_requested: # <-- MODIFIED - schedule.run_pending() - # Sleep for 60 seconds, but check for shutdown flag every second - for _ in range(60): - if shutdown_requested: - break - time.sleep(1) - - logging.info("MarketCapScheduler shutting down.") +# --- REMOVED: market_cap_fetcher_scheduler function --- def run_trade_executor(order_execution_queue: multiprocessing.Queue): @@ -390,7 +348,7 @@ class MainApp: self.watched_coins = coins_to_watch self.shared_prices = shared_prices self.prices = {} - self.market_caps = {} + # --- REMOVED: self.market_caps --- self.open_positions = {} self.background_processes = processes self.process_status = {} @@ -400,23 +358,12 @@ class MainApp: def read_prices(self): """Reads the latest prices directly from the shared memory dictionary.""" try: - self.prices = dict(self.shared_prices) + # --- FIX: Use .copy() for thread-safe iteration --- + self.prices = self.shared_prices.copy() except Exception as e: - logging.debug("Could not read from shared prices dict: {e}") + logging.debug(f"Could not read from shared prices dict: {e}") - def read_market_caps(self): - """Reads the latest market cap summary from its JSON file.""" - if os.path.exists(MARKET_CAP_SUMMARY_FILE): - try: - with open(MARKET_CAP_SUMMARY_FILE, 'r', encoding='utf-8') as f: - summary_data = json.load(f) - - for coin in self.watched_coins: - table_key = f"{coin}_market_cap" - if table_key in summary_data: - self.market_caps[coin] = summary_data[table_key].get('market_cap') - except (json.JSONDecodeError, IOError): - logging.debug("Could not read market cap summary file.") + # --- REMOVED: read_market_caps method --- def read_strategy_statuses(self): """Reads the status JSON file for each enabled strategy.""" @@ -439,7 +386,9 @@ class MainApp: if os.path.exists(TRADE_EXECUTOR_STATUS_FILE): try: with open(TRADE_EXECUTOR_STATUS_FILE, 'r', encoding='utf-8') as f: - self.open_positions = json.load(f) + # --- FIX: Read the 'open_positions' key from the file --- + status_data = json.load(f) + self.open_positions = status_data.get('open_positions', {}) except (IOError, json.JSONDecodeError): logging.debug("Could not read trade executor status file.") else: @@ -450,32 +399,59 @@ class MainApp: for name, process in self.background_processes.items(): self.process_status[name] = "Running" if process.is_alive() else "STOPPED" + def _format_price(self, price_val, width=10): + """Helper function to format prices for the dashboard.""" + try: + price_float = float(price_val) + if price_float < 1: + price_str = f"{price_float:>{width}.6f}" + elif price_float < 100: + price_str = f"{price_float:>{width}.4f}" + else: + price_str = f"{price_float:>{width}.2f}" + except (ValueError, TypeError): + price_str = f"{'Loading...':>{width}}" + return price_str + def display_dashboard(self): """Displays a formatted dashboard with side-by-side tables.""" print("\x1b[H\x1b[J", end="") # Clear screen left_table_lines = ["--- Market Dashboard ---"] - left_table_width = 44 + # --- MODIFIED: Adjusted width for new columns --- + left_table_width = 65 left_table_lines.append("-" * left_table_width) - left_table_lines.append(f"{'#':<2} | {'Coin':^6} | {'Live Price':>10} | {'Market Cap':>15} |") + # --- 
+        left_table_lines.append(f"{'#':<2} | {'Coin':^6} | {'Best Bid':>10} | {'Live Price':>10} | {'Best Ask':>10} | {'Gap':>10} |")
         left_table_lines.append("-" * left_table_width)

         for i, coin in enumerate(self.watched_coins, 1):
-            price_str = self.prices.get(coin, "Loading...")
-            # Format the price string
-            try:
-                price_float = float(price_str)
-                if price_float < 1:
-                    price_str = f"{price_float:>10.6f}"
-                elif price_float < 100:
-                    price_str = f"{price_float:>10.4f}"
-                else:
-                    price_str = f"{price_float:>10.2f}"
-            except (ValueError, TypeError):
-                price_str = f"{'Loading...':>10}"
+            # --- MODIFIED: Fetch all three price types ---
+            mid_price = self.prices.get(coin, "Loading...")
+            bid_price = self.prices.get(f"{coin}_bid", "Loading...")
+            ask_price = self.prices.get(f"{coin}_ask", "Loading...")

-            market_cap = self.market_caps.get(coin)
-            formatted_mc = format_market_cap(market_cap)
-            left_table_lines.append(f"{i:<2} | {coin:^6} | {price_str} | {formatted_mc:>15} |")
+            # --- MODIFIED: Use the new formatting helper ---
+            formatted_mid = self._format_price(mid_price)
+            formatted_bid = self._format_price(bid_price)
+            formatted_ask = self._format_price(ask_price)
+
+            # --- MODIFIED: Calculate gap ---
+            gap_str = f"{'Loading...':>10}"
+            try:
+                # Calculate the spread
+                gap_val = float(ask_price) - float(bid_price)
+                # Format gap with high precision, similar to price
+                if gap_val < 1:
+                    gap_str = f"{gap_val:>{10}.6f}"
+                else:
+                    gap_str = f"{gap_val:>{10}.4f}"
+            except (ValueError, TypeError):
+                pass  # Keep 'Loading...'
+
+            # --- REMOVED: Market Cap logic ---
+
+            # --- MODIFIED: Print all price columns including gap ---
+            left_table_lines.append(f"{i:<2} | {coin:^6} | {formatted_bid} | {formatted_mid} | {formatted_ask} | {gap_str} |")
         left_table_lines.append("-" * left_table_width)

         right_table_lines = ["--- Strategy Status ---"]
@@ -502,10 +478,13 @@
                 coin = status.get('coin', config_params.get('coin', 'N/A'))

                 # --- FIX: Handle nested 'coins_to_copy' logic for size ---
-                if 'coins_to_copy' in config_params:
-                    size = status.get('size', 'Multi')
-                else:
-                    size = config_params.get('size', 'N/A')
+                # --- MODIFIED: Read 'size' from status first, then config, then 'Multi' ---
+                size = status.get('size')
+                if not size:
+                    if 'coins_to_copy' in config_params:
+                        size = 'Multi'
+                    else:
+                        size = config_params.get('size', 'N/A')

                 timeframe = config_params.get('timeframe', 'N/A')
@@ -515,10 +494,16 @@
                 size_display = f"{size:>8}"
                 if isinstance(size, (int, float)):
-                    size_display = f"{size:>8.4f}"  # Format size to 4 decimal places
+                    # --- MODIFIED: More flexible size formatting ---
+                    if size < 0.0001:
+                        size_display = f"{size:>8.6f}"
+                    elif size < 1:
+                        size_display = f"{size:>8.4f}"
+                    else:
+                        size_display = f"{size:>8.2f}"
                 # --- END NEW LOGIC ---

-                right_table_lines.append(f"{i:^2} | {name:<25} | {coin:^6} | {signal:^8} | {price_display:>12} | {last_change_display:>17} | {timeframe:^5} | {size:>8} |")
+                right_table_lines.append(f"{i:^2} | {name:<25} | {coin:^6} | {signal:^8} | {price_display:>12} | {last_change_display:>17} | {timeframe:^5} | {size_display} |")
                 right_table_lines.append("-" * right_table_width)

         output_lines = []
@@ -536,7 +521,32 @@
         output_lines.append(f"{'Account':<10} | {'Coin':<6} | {'Size':>15} | {'Entry Price':>12} | {'Mark Price':>12} | {'PNL':>15} | {'Leverage':>10} |")
         output_lines.append("-" * pos_table_width)

-        # --- REMOVED: Background Processes section ---
+        # --- FIX: Correctly read and display open positions ---
+        if not self.open_positions:
+            output_lines.append(f"{'No open positions.':^{pos_table_width}}")
+        else:
+            for account, positions in self.open_positions.items():
+                if not positions:
+                    continue
+                for coin, pos in positions.items():
+                    try:
+                        size_f = float(pos.get('size', 0))
+                        entry_f = float(pos.get('entry_price', 0))
+                        mark_f = float(self.prices.get(coin, 0))
+                        pnl_f = (mark_f - entry_f) * size_f if size_f > 0 else (entry_f - mark_f) * abs(size_f)
+                        lev = pos.get('leverage', 1)
+
+                        size_str = f"{size_f:>{15}.5f}"
+                        entry_str = f"{entry_f:>{12}.2f}"
+                        mark_str = f"{mark_f:>{12}.2f}"
+                        pnl_str = f"{pnl_f:>{15}.2f}"
+                        lev_str = f"{lev}x"
+
+                        output_lines.append(f"{account:<10} | {coin:<6} | {size_str} | {entry_str} | {mark_str} | {pnl_str} | {lev_str:>10} |")
+                    except (ValueError, TypeError):
+                        output_lines.append(f"{account:<10} | {coin:<6} | {'Error parsing data...':^{pos_table_width-20}} |")
+
+        output_lines.append("-" * pos_table_width)

         final_output = "\n".join(output_lines)
         print(final_output)
@@ -546,7 +556,7 @@ class MainApp:
         """Main loop to read data, display dashboard, and check processes."""
         while True:
             self.read_prices()
-            self.read_market_caps()
+            # --- REMOVED: self.read_market_caps() ---
             self.read_strategy_statuses()
             self.read_executor_status()
             # --- REMOVED: self.check_process_status() ---
@@ -584,10 +594,16 @@ if __name__ == "__main__":

     # --- REVERTED: All processes are daemon=True and in one dict ---
-    processes["Live Market Feed"] = multiprocessing.Process(target=start_live_feed, args=(shared_prices, 'off'), daemon=True)
+    # --- FIX: Pass WATCHED_COINS to the start_live_feed process ---
+    # --- MODIFICATION: Set log level back to 'off' ---
+    processes["Live Market Feed"] = multiprocessing.Process(
+        target=start_live_feed,
+        args=(shared_prices, WATCHED_COINS, 'off'),
+        daemon=True
+    )
     processes["Live Candle Fetcher"] = multiprocessing.Process(target=run_live_candle_fetcher, daemon=True)
     processes["Resampler"] = multiprocessing.Process(target=resampler_scheduler, args=(list(required_timeframes),), daemon=True)
-    processes["Market Cap Fetcher"] = multiprocessing.Process(target=market_cap_fetcher_scheduler, daemon=True)
+    # --- REMOVED: Market Cap Fetcher Process ---
     processes["Dashboard Data"] = multiprocessing.Process(target=run_dashboard_data_fetcher, daemon=True)

     processes["Position Manager"] = multiprocessing.Process(
@@ -665,3 +681,5 @@ if __name__ == "__main__":
     logging.info("Shutdown complete.")

     sys.exit(0)
+
+

From 0210bc93bc9094c7545fbbf98b527d70b1d59c7c Mon Sep 17 00:00:00 2001
From: ditus
Date: Sun, 9 Nov 2025 18:43:59 +0000
Subject: [PATCH 14/18] Delete _data/market_data.db-shm

---
 _data/market_data.db-shm | Bin 32768 -> 0 bytes
 1 file changed, 0 insertions(+), 0 deletions(-)
 delete mode 100644 _data/market_data.db-shm

diff --git a/_data/market_data.db-shm b/_data/market_data.db-shm
deleted file mode 100644
index 708dc2e284cfc1697a4260fdd4ef6bdfc6ba2587..0000000000000000000000000000000000000000
GIT binary patch
literal 0
HcmV?d00001
From: ditus
Date: Sun, 9 Nov 2025 22:07:14 +0100
Subject: [PATCH 15/18] deleted

---
 GEMINI.md                                   |  127 +++
 IMPROVEMENT_ROADMAP.md                      |  300 ++++++
 PROJECT_REVIEW_AND_PROPOSALS.md             |    1 +
 _data/backtesting_conf.json                 |    2 +-
 _data/coin_id_map.json                      |  208 ++++
 _data/coin_precision.json                   |    1 +
 _data/executor_managed_positions.json       |    7 -
 _data/market_cap_data.json                  | 1058 ++++++++++++++++++-
 _data/opened_positions.json                 |   11 +
 _data/strategies.json                       |   37 +-
 _data/strategy_state_copy_trader_eth.json   |    7 +
 _data/strategy_status_copy_trader_eth.json  |    7 +
 _data/strategy_status_ma_cross_btc.json     |    7 -
 _data/strategy_status_sma_125d_btc.json     |    7 -
 _data/strategy_status_sma_125d_eth.json     |    7 -
 _data/strategy_status_sma_44d_btc.json      |    7 -
 _data/strategy_status_sma_5m_eth.json       |    7 -
 _data/strategy_status_sma_cross.json        |    7 -
 _data/strategy_status_sma_cross_1.json      |    7 -
 _data/strategy_status_sma_cross_2.json      |    6 +-
 _data/strategy_status_sma_cross_eth_5m.json |    7 -
 _data/wallets_info.json                     |  290 +++++
 _data/wallets_to_track.json                 |    7 +
 position_logic/__init__.py                  |    2 +
 position_logic/base_logic.py                |   31 +
 position_logic/default_flip_logic.py        |   83 ++
 review.md                                   |   79 ++
 27 files changed, 2213 insertions(+), 107 deletions(-)
 create mode 100644 GEMINI.md
 create mode 100644 IMPROVEMENT_ROADMAP.md
 create mode 100644 PROJECT_REVIEW_AND_PROPOSALS.md
 create mode 100644 _data/coin_id_map.json
 delete mode 100644 _data/executor_managed_positions.json
 create mode 100644 _data/opened_positions.json
 create mode 100644 _data/strategy_state_copy_trader_eth.json
 create mode 100644 _data/strategy_status_copy_trader_eth.json
 delete mode 100644 _data/strategy_status_ma_cross_btc.json
 delete mode 100644 _data/strategy_status_sma_125d_btc.json
 delete mode 100644 _data/strategy_status_sma_125d_eth.json
 delete mode 100644 _data/strategy_status_sma_44d_btc.json
 delete mode 100644 _data/strategy_status_sma_5m_eth.json
 delete mode 100644 _data/strategy_status_sma_cross.json
 delete mode 100644 _data/strategy_status_sma_cross_1.json
 delete mode 100644 _data/strategy_status_sma_cross_eth_5m.json
 create mode 100644 _data/wallets_info.json
 create mode 100644 _data/wallets_to_track.json
 create mode 100644 position_logic/__init__.py
 create mode 100644 position_logic/base_logic.py
 create mode 100644 position_logic/default_flip_logic.py
 create mode 100644 review.md

diff --git a/GEMINI.md b/GEMINI.md
new file mode 100644
index 0000000..89eb87c
--- /dev/null
+++ b/GEMINI.md
@@ -0,0 +1,127 @@
+# Project Overview
+
+This project is a sophisticated, multi-process automated trading bot for the Hyperliquid decentralized exchange. It is written in Python and uses a modular architecture to separate concerns like data fetching, strategy execution, and trade management.
+
+The bot uses a high-performance data pipeline with SQLite for storing market data. Trading strategies are defined and configured in a JSON file, allowing for easy adjustments without code changes. The system supports multiple, independent trading agents for risk segregation and PNL tracking.
+A live terminal dashboard provides real-time monitoring of market data, strategy signals, and the status of all background processes.
+
+## Building and Running
+
+### 1. Setup
+
+1. **Create and activate a virtual environment:**
+   ```bash
+   # For Windows
+   python -m venv .venv
+   .\.venv\Scripts\activate
+
+   # For macOS/Linux
+   python3 -m venv .venv
+   source .venv/bin/activate
+   ```
+
+2. **Install dependencies:**
+   ```bash
+   pip install -r requirements.txt
+   ```
+
+3. **Configure environment variables:**
+   Create a `.env` file in the root of the project (you can copy `.env.example`) and add your Hyperliquid wallet private key and any agent keys.
+
+4. **Configure strategies:**
+   Edit `_data/strategies.json` to enable and configure your desired trading strategies.
+
+### 2. Running the Bot
+
+To run the main application, which includes the dashboard and all background processes, execute the following command:
+
+```bash
+python main_app.py
+```
+
+## Development Conventions
+
+* **Modularity:** The project is divided into several scripts, each with a specific responsibility (e.g., `data_fetcher.py`, `trade_executor.py`).
+* **Configuration-driven:** Strategies are defined in `_data/strategies.json`, not hardcoded. This allows for easy management of strategies.
+* **Multi-processing:** The application uses the `multiprocessing` module to run different components in parallel for performance and stability.
+* **Strategies:** Custom strategies should inherit from the `BaseStrategy` class (defined in `strategies/base_strategy.py`) and implement the `calculate_signals` method (see the skeleton in the appendix below).
+* **Documentation:** The `WIKI/` directory contains detailed documentation for the project. Start with `WIKI/SUMMARY.md`.
+
+# Project Review and Recommendations
+
+This review provides an analysis of the current state of the automated trading bot project, proposes specific code improvements, and identifies files that appear to be unused or are one-off utilities that could be reorganized.
+
+The project is a well-structured, multi-process Python application for crypto trading. It has a clear separation of concerns between data fetching, strategy execution, and trade management. The use of `multiprocessing` and a centralized `main_app.py` orchestrator is a solid architectural choice.
+
+The following sections detail recommendations for improving configuration management, code structure, and robustness, along with a list of files recommended for cleanup.
+
+---
+
+## Proposed Code Changes
+
+### 1. Centralize Configuration
+
+- **Issue:** Key configuration variables like `WATCHED_COINS` and `required_timeframes` are hardcoded in `main_app.py`. This makes them difficult to change without modifying the source code.
+- **Proposal:**
+  - Create a central configuration file, e.g., `_data/config.json`.
+  - Move `WATCHED_COINS` and `required_timeframes` into this new file.
+  - Load this configuration in `main_app.py` at startup.
+- **Benefit:** Decouples configuration from code, making the application more flexible and easier to manage.
+
+### 2. Refactor `main_app.py` for Clarity
+
+- **Issue:** `main_app.py` is long and handles multiple responsibilities: process orchestration, dashboard rendering, and data reading.
+- **Proposal:**
+  - **Abstract Process Management:** The functions for running subprocesses (e.g., `run_live_candle_fetcher`, `run_resampler_job`) contain repetitive logic for logging, shutdown handling, and process looping. This could be abstracted into a generic `ProcessRunner` class (see the sketch after this list).
+  - **Create a Dashboard Class:** The complex dashboard rendering logic could be moved into a separate `Dashboard` class to improve separation of concerns and make the main application loop cleaner.
+- **Benefit:** Improves code readability, reduces duplication, and makes the application easier to maintain and extend.
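+
+A minimal sketch of what such a `ProcessRunner` could look like (illustrative only; this class does not exist in the codebase yet, and the names are placeholders):
+
+```python
+import logging
+import signal
+import time
+
+class ProcessRunner:
+    """Wraps a periodic job with the logging and graceful-shutdown
+    boilerplate currently duplicated across the run_* functions."""
+
+    def __init__(self, name, job, interval_seconds=60):
+        self.name = name
+        self.job = job  # callable executed once per cycle
+        self.interval_seconds = interval_seconds
+        self._shutdown = False
+
+    def _handle_signal(self, signum, frame):
+        self._shutdown = True
+
+    def run(self):
+        signal.signal(signal.SIGTERM, self._handle_signal)
+        signal.signal(signal.SIGINT, self._handle_signal)
+        while not self._shutdown:
+            try:
+                self.job()
+            except Exception as e:
+                logging.error(f"{self.name}: job failed: {e}")
+            # Sleep in one-second steps so a shutdown request is noticed quickly
+            for _ in range(self.interval_seconds):
+                if self._shutdown:
+                    break
+                time.sleep(1)
+        logging.info(f"{self.name} shutting down.")
+```
+
+Each scheduler process could then be started as `multiprocessing.Process(target=ProcessRunner("Resampler", run_resampler_job).run, daemon=True)`.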
+
+### 3. Improve Project Structure
+
+- **Issue:** The root directory is cluttered with numerous Python scripts, making it difficult to distinguish between core application files, utility scripts, and old/example files.
+- **Proposal:**
+  - Create a `scripts/` directory and move all one-off utility and maintenance scripts into it.
+  - Consider creating a `src/` or `app/` directory to house the core application source code (`main_app.py`, `trade_executor.py`, etc.), separating it clearly from configuration, data, and documentation.
+- **Benefit:** A cleaner, more organized project structure that is easier for new developers to understand.
+
+### 4. Enhance Robustness and Error Handling
+
+- **Issue:** The agent loading in `trade_executor.py` relies on discovering environment variables by a naming convention (`_AGENT_PK`). This is clever but can be brittle if environment variables are named incorrectly.
+- **Proposal:**
+  - Explicitly define the agent names and their corresponding environment variable keys in the proposed `_data/config.json` file. The `trade_executor` would then load only the agents specified in the configuration.
+- **Benefit:** Makes agent configuration more explicit and less prone to errors from stray environment variables.
+
+---
+
+## Identified Unused/Utility Files
+
+The following files were identified as likely being unused by the core application, being obsolete, or serving as one-off utilities. It is recommended to **move them to a `scripts/` directory** or **delete them** if they are obsolete.
+
+### Obsolete / Old Versions:
+- `data_fetcher_old.py`
+- `market_old.py`
+- `base_strategy.py` (The one in the root directory; the one in `strategies/` is used).
+
+### One-Off Utility Scripts (Recommend moving to `scripts/`):
+- `!migrate_to_sqlite.py`
+- `import_csv.py`
+- `del_market_cap_tables.py`
+- `fix_timestamps.py`
+- `list_coins.py`
+- `create_agent.py`
+
+### Examples / Unused Code:
+- `basic_ws.py` (Appears to be an example file).
+- `backtester.py`
+- `strategy_sma_cross.py` (A strategy file in the root, not in the `strategies` folder).
+- `strategy_template.py`
+
+### Standalone / Potentially Unused Core Files:
+The following files seem to have their logic already integrated into the main multi-process application. They might be remnants of a previous architecture and may not be needed as standalone scripts.
+- `address_monitor.py`
+- `position_monitor.py`
+- `trade_log.py`
+- `wallet_data.py`
+- `whale_tracker.py`
+
+### Data / Log Files (Recommend archiving or deleting):
+- `hyperliquid_wallet_data_*.json` (These appear to be backups or logs).
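+
+## Appendix: Strategy Skeleton
+
+To make the **Strategies** convention above concrete, a minimal skeleton is sketched below. The exact `calculate_signals` signature and the column names are assumptions, not the real `BaseStrategy` interface; check `strategies/base_strategy.py` for the authoritative version.
+
+```python
+import pandas as pd
+
+from strategies.base_strategy import BaseStrategy
+
+class ExampleSmaStrategy(BaseStrategy):
+    """Toy example: long above a 20-period SMA, short below it."""
+
+    def calculate_signals(self, df: pd.DataFrame) -> pd.DataFrame:
+        # Assumes candles arrive with a 'close' column
+        df['sma'] = df['close'].rolling(20).mean()
+        df['signal'] = 'HOLD'
+        df.loc[df['close'] > df['sma'], 'signal'] = 'BUY'
+        df.loc[df['close'] < df['sma'], 'signal'] = 'SELL'
+        return df
+```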
\ No newline at end of file
diff --git a/IMPROVEMENT_ROADMAP.md b/IMPROVEMENT_ROADMAP.md
new file mode 100644
index 0000000..503fad1
--- /dev/null
+++ b/IMPROVEMENT_ROADMAP.md
@@ -0,0 +1,300 @@
+# Improvement Roadmap - Hyperliquid Trading Bot
+
+## Overview
+This document outlines the detailed implementation plan for transforming the trading bot into a production-ready system.
+
+## Phase 1: Foundation (Weeks 1-2)
+
+### Week 1: Security & Stability
+
+#### Day 1-2: Critical Security Fixes
+- [ ] **Implement Encrypted Key Storage**
+  - Create `security/key_manager.py`
+  - Replace environment variable key access
+  - Add key rotation mechanism
+  - **Files**: `trade_executor.py`, `create_agent.py`
+
+- [ ] **Add Input Validation Framework**
+  - Create `validation/trading_validator.py`
+  - Validate all trading parameters
+  - Add sanitization for user inputs
+  - **Files**: `position_manager.py`, `trade_executor.py`
+
+#### Day 3-4: Risk Management
+- [ ] **Implement Circuit Breakers**
+  - Create `risk/circuit_breaker.py`
+  - Add trading halt conditions
+  - Implement automatic recovery
+  - **Files**: `trade_executor.py`, `position_manager.py`
+
+- [ ] **Fix Import Resolution Issues**
+  - Update relative imports
+  - Add `__init__.py` files where missing
+  - Test all module imports
+  - **Files**: `main_app.py`, all strategy files
+
+#### Day 5-7: Code Quality
+- [ ] **Refactor Dashboard Display**
+  - Extract `DashboardRenderer` class
+  - Split into market/strategy/position components
+  - Add configuration for display options
+  - **Files**: `main_app.py`
+
+### Week 2: Configuration & Error Handling
+
+#### Day 8-9: Configuration Management
+- [ ] **Create Centralized Configuration**
+  - Create `config/settings.py`
+  - Move all magic numbers to config
+  - Add environment-specific configs
+  - **Files**: All Python files
+
+- [ ] **Standardize Error Handling**
+  - Create `utils/error_handlers.py`
+  - Implement retry decorators (see the sketch after this item)
+  - Add structured exception classes
+  - **Files**: All core modules
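+
+A possible shape for the retry decorator mentioned above (a sketch; `utils/error_handlers.py` does not exist yet and the parameters are illustrative):
+
+```python
+import functools
+import logging
+import time
+
+def retry(attempts=3, delay_seconds=2, exceptions=(Exception,)):
+    """Retry a flaky call, e.g. an exchange API request, before giving up."""
+    def decorator(func):
+        @functools.wraps(func)
+        def wrapper(*args, **kwargs):
+            for attempt in range(1, attempts + 1):
+                try:
+                    return func(*args, **kwargs)
+                except exceptions as e:
+                    logging.warning(f"{func.__name__} failed ({attempt}/{attempts}): {e}")
+                    if attempt == attempts:
+                        raise
+                    time.sleep(delay_seconds * attempt)  # linear backoff
+        return wrapper
+    return decorator
+```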
+
+#### Day 10-12: Database Improvements
+- [ ] **Implement Connection Pool**
+  - Create `database/connection_pool.py`
+  - Replace direct SQLite connections
+  - Add connection health monitoring
+  - **Files**: `base_strategy.py`, all data access files
+
+- [ ] **Add Database Migrations**
+  - Create `database/migrations/`
+  - Version control schema changes
+  - Add rollback capabilities
+  - **Files**: Database schema files
+
+#### Day 13-14: Basic Testing
+- [ ] **Create Test Framework**
+  - Set up `tests/` directory structure
+  - Add pytest configuration
+  - Create test fixtures and mocks
+  - **Files**: New test files
+
+## Phase 2: Performance & Testing (Weeks 3-4)
+
+### Week 3: Performance Optimization
+
+#### Day 15-17: Caching Layer
+- [ ] **Implement Redis/Memory Cache**
+  - Create `cache/cache_manager.py`
+  - Cache frequently accessed data
+  - Add cache invalidation logic
+  - **Files**: `data_fetcher.py`, `base_strategy.py`
+
+#### Day 18-19: Async Operations
+- [ ] **Convert to Async/Await**
+  - Identify blocking operations
+  - Convert to async patterns
+  - Add async context managers
+  - **Files**: `live_market_utils.py`, API calls
+
+#### Day 20-21: Batch Processing
+- [ ] **Implement Batch Operations**
+  - Batch database writes
+  - Bulk API requests
+  - Optimize data processing
+  - **Files**: Data processing modules
+
+### Week 4: Testing Framework
+
+#### Day 22-24: Unit Tests
+- [ ] **Comprehensive Unit Test Suite**
+  - Test all core classes
+  - Mock external dependencies
+  - Achieve >80% coverage
+  - **Files**: `tests/unit/`
+
+#### Day 25-26: Integration Tests
+- [ ] **End-to-End Testing**
+  - Test complete workflows
+  - Mock Hyperliquid API
+  - Test process communication
+  - **Files**: `tests/integration/`
+
+#### Day 27-28: Paper Trading
+- [ ] **Paper Trading Mode**
+  - Create simulation environment
+  - Mock trade execution
+  - Add performance tracking
+  - **Files**: `trade_executor.py`, new simulation files
+
+## Phase 3: Monitoring & Observability (Weeks 5-6)
+
+### Week 5: Metrics & Monitoring
+
+#### Day 29-31: Metrics Collection
+- [ ] **Add Prometheus Metrics**
+  - Create `monitoring/metrics.py`
+  - Track key performance indicators
+  - Add custom business metrics
+  - **Files**: All core modules
+
+#### Day 32-33: Health Checks
+- [ ] **Health Check System**
+  - Create `monitoring/health_check.py`
+  - Monitor all system components
+  - Add dependency checks
+  - **Files**: `main_app.py`, all processes
+
+#### Day 34-35: Alerting
+- [ ] **Alerting System**
+  - Create `monitoring/alerts.py`
+  - Configure alert rules
+  - Add notification channels
+  - **Files**: New alerting files
+
+### Week 6: Documentation & Developer Experience
+
+#### Day 36-38: API Documentation
+- [ ] **Auto-Generated Docs**
+  - Set up Sphinx/MkDocs
+  - Document all public APIs
+  - Add code examples
+  - **Files**: `docs/` directory
+
+#### Day 39-40: Setup Improvements
+- [ ] **Interactive Setup**
+  - Create setup wizard
+  - Validate configuration
+  - Add guided configuration
+  - **Files**: `setup.py`, new setup files
+
+#### Day 41-42: Examples & Guides
+- [ ] **Strategy Examples**
+  - Create example strategies
+  - Add development tutorials
+  - Document best practices
+  - **Files**: `examples/`, `WIKI/`
+
+## Phase 4: Advanced Features (Weeks 7-8)
+
+### Week 7: Advanced Risk Management
+
+#### Day 43-45: Position Sizing
+- [ ] **Dynamic Position Sizing**
+  - Volatility-based sizing
+  - Portfolio risk metrics
+  - Kelly criterion implementation (see the sketch after this item)
+  - **Files**: `position_manager.py`, new risk modules
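+
+The Kelly sizing mentioned above reduces to a small function; a sketch with illustrative numbers:
+
+```python
+def kelly_fraction(win_prob: float, win_loss_ratio: float) -> float:
+    """Kelly criterion: f* = p - (1 - p) / b, floored at zero for negative edges."""
+    f = win_prob - (1 - win_prob) / win_loss_ratio
+    return max(f, 0.0)
+
+# Example: a 55% win rate with a 1.5:1 average win/loss suggests staking
+# 0.55 - 0.45 / 1.5 = 0.25, i.e. 25% of the bankroll (before any
+# fractional-Kelly safety factor).
+print(kelly_fraction(0.55, 1.5))  # ~0.25
+```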
+
+#### Day 46-47: Advanced Orders
+- [ ] **Advanced Order Types**
+  - Stop-loss orders
+  - Take-profit orders
+  - Conditional orders
+  - **Files**: `trade_executor.py`
+
+#### Day 48-49: Portfolio Management
+- [ ] **Portfolio Optimization**
+  - Correlation analysis
+  - Risk parity allocation
+  - Rebalancing logic
+  - **Files**: New portfolio modules
+
+### Week 8: Production Readiness
+
+#### Day 50-52: Deployment
+- [ ] **Production Deployment**
+  - Docker containerization
+  - Kubernetes manifests
+  - CI/CD pipeline
+  - **Files**: `docker/`, `.github/workflows/`
+
+#### Day 53-54: Performance Profiling
+- [ ] **Profiling Tools**
+  - Performance monitoring
+  - Memory usage tracking
+  - Bottleneck identification
+  - **Files**: New profiling modules
+
+#### Day 55-56: Final Polish
+- [ ] **Production Hardening**
+  - Security audit
+  - Load testing
+  - Documentation review
+  - **Files**: All files
+
+## Implementation Guidelines
+
+### Daily Workflow
+1. **Morning Standup**: Review progress, identify blockers
+2. **Development**: Focus on assigned tasks
+3. **Testing**: Write tests alongside code
+4. **Code Review**: Peer review all changes
+5. **Documentation**: Update docs with changes
+
+### Quality Gates
+- All code must pass linting and formatting
+- New features require unit tests
+- Integration tests for critical paths
+- Security review for sensitive changes
+
+### Risk Mitigation
+- Feature flags for new functionality
+- Gradual rollout with monitoring
+- Rollback procedures for each change
+- Regular backup and recovery testing
+
+## Success Criteria
+
+### Phase 1 Success
+- [ ] All security vulnerabilities fixed
+- [ ] Import resolution issues resolved
+- [ ] Basic test framework in place
+- [ ] Configuration management implemented
+
+### Phase 2 Success
+- [ ] Performance improvements measured
+- [ ] Test coverage >80%
+- [ ] Paper trading mode functional
+- [ ] Async operations implemented
+
+### Phase 3 Success
+- [ ] Monitoring dashboard operational
+- [ ] Alerting system functional
+- [ ] Documentation complete
+- [ ] Developer experience improved
+
+### Phase 4 Success
+- [ ] Production deployment ready
+- [ ] Advanced features working
+- [ ] Performance benchmarks met
+- [ ] Security audit passed
+
+## Resource Requirements
+
+### Development Team
+- **Senior Python Developer**: Lead architecture and security
+- **Backend Developer**: Performance and database optimization
+- **DevOps Engineer**: Deployment and monitoring
+- **QA Engineer**: Testing framework and automation
+
+### Tools & Services
+- **Development**: PyCharm/VSCode, Git, Docker
+- **Testing**: Pytest, Mock, Coverage tools
+- **Monitoring**: Prometheus, Grafana, AlertManager
+- **CI/CD**: GitHub Actions, Docker Hub
+- **Documentation**: Sphinx/MkDocs, ReadTheDocs
+
+### Infrastructure
+- **Development**: Local development environment
+- **Testing**: Staging environment with test data
+- **Production**: Cloud deployment with monitoring
+- **Backup**: Automated backup and recovery system
+
+## Timeline Summary
+
+| Phase | Duration | Key Deliverables |
+|-------|----------|------------------|
+| Phase 1 | 2 weeks | Security fixes, basic testing, configuration |
+| Phase 2 | 2 weeks | Performance optimization, comprehensive testing |
+| Phase 3 | 2 weeks | Monitoring, documentation, developer tools |
+| Phase 4 | 2 weeks | Advanced features, production deployment |
+| **Total** | **8 weeks** | **Production-ready trading system** |
+
+This roadmap provides a structured approach to transforming the trading bot into a robust, scalable, and maintainable system suitable for production use.
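+
+## Appendix: Caching Sketch
+
+As a concrete starting point for the Day 15-17 caching layer, a minimal in-memory shape is sketched below (`cache/cache_manager.py` is not implemented yet; the TTL default is illustrative):
+
+```python
+import time
+
+class TTLCache:
+    """Minimal in-memory cache with per-entry expiry."""
+
+    def __init__(self, ttl_seconds=60):
+        self.ttl = ttl_seconds
+        self._store = {}  # key -> (expires_at, value)
+
+    def get(self, key):
+        entry = self._store.get(key)
+        if entry is None or entry[0] < time.time():
+            self._store.pop(key, None)  # evict stale entries lazily
+            return None
+        return entry[1]
+
+    def set(self, key, value):
+        self._store[key] = (time.time() + self.ttl, value)
+```
+
+A Redis-backed implementation could expose the same `get`/`set` interface so strategies stay unaware of the backend.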
\ No newline at end of file diff --git a/PROJECT_REVIEW_AND_PROPOSALS.md b/PROJECT_REVIEW_AND_PROPOSALS.md new file mode 100644 index 0000000..e96a1da --- /dev/null +++ b/PROJECT_REVIEW_AND_PROPOSALS.md @@ -0,0 +1 @@ +"# Comprehensive Project Review and Improvement Proposals" diff --git a/_data/backtesting_conf.json b/_data/backtesting_conf.json index 66c76cf..aec37b7 100644 --- a/_data/backtesting_conf.json +++ b/_data/backtesting_conf.json @@ -1,6 +1,6 @@ { "sma_cross_eth_5m": { - "strategy_name": "sma_cross_2", + "strategy_name": "sma_cross_1", "script": "strategies.ma_cross_strategy.MaCrossStrategy", "optimization_params": { "fast": { diff --git a/_data/coin_id_map.json b/_data/coin_id_map.json new file mode 100644 index 0000000..f6c9d78 --- /dev/null +++ b/_data/coin_id_map.json @@ -0,0 +1,208 @@ +{ + "0G": "zero-gravity", + "2Z": "doublezero", + "AAVE": "aave", + "ACE": "endurance", + "ADA": "ada-the-dog", + "AI": "sleepless-ai", + "AI16Z": "ai16z", + "AIXBT": "aixbt", + "ALGO": "dear-algorithm", + "ALT": "altlayer", + "ANIME": "anime-token", + "APE": "ape-3", + "APEX": "apex-token-2", + "APT": "aptos", + "AR": "arweave", + "ARB": "osmosis-allarb", + "ARK": "ark-3", + "ASTER": "astar", + "ATOM": "lost-bitcoin-layer", + "AVAX": "binance-peg-avalanche", + "AVNT": "avantis", + "BABY": "baby-2", + "BADGER": "badger-dao", + "BANANA": "nforbanana", + "BCH": "bitcoin-cash", + "BERA": "berachain-bera", + "BIGTIME": "big-time", + "BIO": "bio-protocol", + "BLAST": "blast", + "BLUR": "blur", + "BLZ": "bluzelle", + "BNB": "binancecoin", + "BNT": "bancor", + "BOME": "book-of-meme", + "BRETT": "brett", + "BSV": "bitcoin-cash-sv", + "BTC": "bitcoin", + "CAKE": "pancakeswap-token", + "CANTO": "canto", + "CATI": "catizen", + "CELO": "celo", + "CFX": "cosmic-force-token-v2", + "CHILLGUY": "just-a-chill-guy", + "COMP": "compound-governance-token", + "CRV": "curve-dao-token", + "CYBER": "cyberconnect", + "DOGE": "doge-on-pulsechain", + "DOOD": "doodles", + "DOT": "xcdot", + "DYDX": "dydx-chain", + "DYM": "dymension", + "EIGEN": "eigenlayer", + "ENA": "ethena", + "ENS": "ethereum-name-service", + "ETC": "ethereum-classic", + "ETH": "ethereum", + "ETHFI": "ether-fi", + "FARTCOIN": "fartcoin-2", + "FET": "fetch-ai", + "FIL": "filecoin", + "FRIEND": "friend-tech", + "FTM": "fantom", + "FTT": "ftx-token", + "GALA": "gala", + "GAS": "gas", + "GMT": "stepn", + "GMX": "gmx", + "GOAT": "goat", + "GRASS": "grass-3", + "GRIFFAIN": "griffain", + "HBAR": "hedera-hashgraph", + "HEMI": "hemi", + "HMSTR": "hamster-kombat", + "HYPE": "hyperliquid", + "HYPER": "hyper-4", + "ILV": "illuvium", + "IMX": "immutable-x", + "INIT": "initia", + "INJ": "injective-protocol", + "IO": "io", + "IOTA": "iota-2", + "IP": "story-2", + "JELLY": "jelly-time", + "JTO": "jito-governance-token", + "JUP": "jupiter-exchange-solana", + "KAITO": "kaito", + "KAS": "wrapped-kaspa", + "LAUNCHCOIN": "ben-pasternak", + "LAYER": "unilayer", + "LDO": "linea-bridged-ldo-linea", + "LINEA": "linea", + "LINK": "osmosis-alllink", + "LISTA": "lista", + "LOOM": "loom", + "LTC": "litecoin", + "MANTA": "manta-network", + "MATIC": "matic-network", + "MAV": "maverick-protocol", + "MAVIA": "heroes-of-mavia", + "ME": "magic-eden", + "MEGA": "megaeth", + "MELANIA": "melania-meme", + "MEME": "mpx6900", + "MERL": "merlin-chain", + "MET": "metya", + "MEW": "cat-in-a-dogs-world", + "MINA": "mina-protocol", + "MKR": "maker", + "MNT": "mynth", + "MON": "mon-protocol", + "MOODENG": "moo-deng-2", + "MORPHO": "morpho", + "MOVE": "movement", + "MYRO": "myro", + "NEAR": 
"near", + "NEO": "neo", + "NIL": "nillion", + "NOT": "nothing-3", + "NTRN": "neutron-3", + "NXPC": "nexpace", + "OGN": "origin-protocol", + "OM": "mantra-dao", + "OMNI": "omni-2", + "ONDO": "ondo-finance", + "OP": "optimism", + "ORBS": "orbs", + "ORDI": "ordinals", + "OX": "ox-fun", + "PANDORA": "pandora", + "PAXG": "pax-gold", + "PENDLE": "pendle", + "PENGU": "pudgy-penguins", + "PEOPLE": "constitutiondao-wormhole", + "PIXEL": "pixel-3", + "PNUT": "pnut", + "POL": "proof-of-liquidity", + "POLYX": "polymesh", + "POPCAT": "popcat", + "PROMPT": "wayfinder", + "PROVE": "succinct", + "PUMP": "pump-fun", + "PURR": "purr-2", + "PYTH": "pyth-network", + "RDNT": "radiant-capital", + "RENDER": "render-token", + "REQ": "request-network", + "RESOLV": "resolv", + "REZ": "renzo", + "RLB": "rollbit-coin", + "RSR": "reserve-rights-token", + "RUNE": "thorchain", + "S": "token-s", + "SAGA": "saga-2", + "SAND": "the-sandbox-wormhole", + "SCR": "scroll", + "SEI": "sei-network", + "SHIA": "shiba-saga", + "SKY": "sky", + "SNX": "havven", + "SOL": "solana", + "SOPH": "sophon", + "SPX": "spx6900", + "STBL": "stbl", + "STG": "stargate-finance", + "STRAX": "stratis", + "STRK": "starknet", + "STX": "stox", + "SUI": "sui", + "SUPER": "superfarm", + "SUSHI": "sushi", + "SYRUP": "syrup", + "TAO": "the-anthropic-order", + "TIA": "tia", + "TNSR": "tensorium", + "TON": "tontoken", + "TRB": "tellor", + "TRUMP": "trumpeffect69420", + "TRX": "tron-bsc", + "TST": "test-3", + "TURBO": "turbo", + "UMA": "uma", + "UNI": "uni", + "UNIBOT": "unibot", + "USTC": "wrapped-ust", + "USUAL": "usual", + "VINE": "vine", + "VIRTUAL": "virtual-protocol", + "VVV": "venice-token", + "W": "w", + "WCT": "connect-token-wct", + "WIF": "wif-secondchance", + "WLD": "worldcoin-wld", + "WLFI": "world-liberty-financial", + "XAI": "xai-blockchain", + "XLM": "stellar", + "XPL": "pulse-2", + "XRP": "ripple", + "YGG": "yield-guild-games", + "YZY": "yzy", + "ZEC": "zcash", + "ZEN": "zenith-3", + "ZEREBRO": "zerebro", + "ZETA": "zeta", + "ZK": "zksync", + "ZORA": "zora", + "ZRO": "layerzero" +} \ No newline at end of file diff --git a/_data/coin_precision.json b/_data/coin_precision.json index fb9d0e8..7d927f3 100644 --- a/_data/coin_precision.json +++ b/_data/coin_precision.json @@ -101,6 +101,7 @@ "MAV": 0, "MAVIA": 1, "ME": 1, + "MEGA": 0, "MELANIA": 1, "MEME": 0, "MERL": 0, diff --git a/_data/executor_managed_positions.json b/_data/executor_managed_positions.json deleted file mode 100644 index 4a80fe9..0000000 --- a/_data/executor_managed_positions.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "sma_cross_2": { - "coin": "BTC", - "side": "short", - "size": 0.0001 - } -} \ No newline at end of file diff --git a/_data/market_cap_data.json b/_data/market_cap_data.json index 967c76d..45b1096 100644 --- a/_data/market_cap_data.json +++ b/_data/market_cap_data.json @@ -1,47 +1,1043 @@ { - "BTC_market_cap": { - "datetime_utc": "2025-10-14 19:07:32", - "market_cap": 2254100854707.6426 + "0G_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 266618645.60905194 }, - "ETH_market_cap": { - "datetime_utc": "2025-10-14 19:07:45", - "market_cap": 498260644977.71 + "2Z_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 556733577.2083987 }, - "SOL_market_cap": { - "datetime_utc": "2025-10-14 19:07:54", - "market_cap": 110493585034.85222 + "AAVE_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 3043486983.2528467 }, 
- "BNB_market_cap": { - "datetime_utc": "2025-10-14 19:08:01", - "market_cap": 169461959349.39044 + "ACE_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 19021793.55897193 }, - "ZEC_market_cap": { - "datetime_utc": "2025-10-14 19:08:32", - "market_cap": 3915238492.7266335 + "ADA_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 252519.9851697924 }, - "SUI_market_cap": { - "datetime_utc": "2025-10-14 19:08:51", - "market_cap": 10305847774.680008 + "AI_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 7831210.097665075 }, - "STABLECOINS_market_cap": { - "datetime_utc": "2025-10-14 00:00:00", - "market_cap": 551315140796.8396 + "AI16Z_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 75642873.65855925 + }, + "AIXBT_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 64014261.43038649 + }, + "ALGO_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 15203.326275876343 + }, + "ALT_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 80742549.40336938 + }, + "ANIME_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 60608.99835541969 + }, + "APE_market_cap": { + "datetime_utc": "2025-11-02 00:00:00", + "timestamp_ms": 1762041600000, + "market_cap": 0.0 + }, + "APEX_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 108228391.87106021 + }, + "APT_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 1997332824.5102599 + }, + "AR_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 246076704.36594132 + }, + "ARB_market_cap": { + "datetime_utc": "2025-09-28 00:00:00", + "timestamp_ms": 1759017600000, + "market_cap": 72619.00796098988 + }, + "ARK_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 411547691.74511635 }, "ASTER_market_cap": { - "datetime_utc": "2025-10-14 20:47:18", - "market_cap": 163953008.77347806 + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 122331099.54500043 + }, + "ATOM_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 29172.50856824532 + }, + "AVAX_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 33300889.08351314 + }, + "AVNT_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 140133510.68489558 + }, + "BABY_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 36558.97600941408 + }, + "BADGER_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 15180909.125460342 + }, + "BANANA_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 43487.97083739428 + }, + "BCH_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 10073215006.561533 + }, + "BERA_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + 
"market_cap": 205588072.4053306 + }, + "BIGTIME_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 51337212.447418235 + }, + "BIO_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 136544057.938857 + }, + "BLAST_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 56400161.83168796 + }, + "BLUR_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 109559269.254535 + }, + "BLZ_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 10064448.528690653 + }, + "BNB_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 136686241889.6909 + }, + "BNT_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 59682479.43158628 + }, + "BOME_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 58354558.41112271 + }, + "BRETT_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 0.0 + }, + "BSV_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 420933092.7541339 + }, + "BTC_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 2122955589451.965 + }, + "CAKE_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 762791857.5600649 + }, + "CANTO_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 1322199.2787484196 + }, + "CATI_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 13552995.092222402 + }, + "CELO_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 135053601.0443175 + }, + "CFX_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 0.0 + }, + "CHILLGUY_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 174042.3083893369 + }, + "COMP_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 315522371.1288435 + }, + "CRV_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 610727599.216157 + }, + "CYBER_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 42360332.624864906 + }, + "DOGE_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 0.0 + }, + "DOOD_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 48649820.43027841 + }, + "DOT_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 0.0 + }, + "DYDX_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 214018887.73670998 + }, + "DYM_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 32110231.189281642 + }, + "EIGEN_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 330085205.96449065 + }, + "ENA_market_cap": { + 
"datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 2376161450.7710357 + }, + "ENS_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 446155832.60514927 + }, + "ETC_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 2313131264.106589 + }, + "ETH_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 434299353161.9843 + }, + "ETHFI_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 485118477.0247199 + }, + "FARTCOIN_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 284376.59519821126 + }, + "FET_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 544489908.0582802 + }, + "FIL_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 1016083740.0190436 + }, + "FRIEND_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 2869341.0552818435 + }, + "FTM_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 0.0 + }, + "FTT_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 0.0 + }, + "GALA_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 410214884.087835 + }, + "GAS_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 149857716.9366841 + }, + "GMT_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 68841557.52603114 + }, + "GMX_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 91807230.98538564 + }, + "GOAT_market_cap": { + "datetime_utc": "2025-11-01 00:00:00", + "timestamp_ms": 1761955200000, + "market_cap": 0.0 + }, + "GRASS_market_cap": { + "datetime_utc": "2025-10-28 00:00:00", + "timestamp_ms": 1761609600000, + "market_cap": 38585.3005549229 + }, + "GRIFFAIN_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 17392444.018734735 + }, + "HBAR_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 7432895531.68406 + }, + "HEMI_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 34585774.62743865 + }, + "HMSTR_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 21881457.716577947 }, "HYPE_market_cap": { - "datetime_utc": "2025-10-14 20:55:21", - "market_cap": 10637373991.458858 + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 10744066200.305445 }, - "TOTAL_market_cap_daily": { - "datetime_utc": "2025-10-16 00:00:00", - "market_cap": 3849619103702.8604 + "HYPER_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 0.0 + }, + "ILV_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 89567990.66438538 + }, + "IMX_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 867440139.0762451 + }, + "INIT_market_cap": { + "datetime_utc": "2025-11-04 
00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 21128296.434240393 + }, + "INJ_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 659065871.8067399 + }, + "IO_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 62505119.61215299 + }, + "IOTA_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 24767325.336265605 + }, + "IP_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 1288944497.962775 + }, + "JELLY_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 14213.078326048195 + }, + "JTO_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 323919252.5143652 + }, + "JUP_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 1165472279.016814 + }, + "KAITO_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 218111601.119164 + }, + "KAS_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 0.0 + }, + "LAUNCHCOIN_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 25018591.623293683 + }, + "LAYER_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 151162.8640712551 + }, + "LDO_market_cap": { + "datetime_utc": "1970-01-01 00:00:00", + "timestamp_ms": 0, + "market_cap": 0.0 + }, + "LINEA_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 187698529.9079421 + }, + "LINK_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 142279.72147959325 + }, + "LISTA_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 60841645.41038552 + }, + "LOOM_market_cap": { + "datetime_utc": "2025-10-31 00:00:00", + "timestamp_ms": 1761868800000, + "market_cap": 0.0 + }, + "LTC_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 6676039164.434603 + }, + "MANTA_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 44172013.01770599 + }, + "MATIC_market_cap": { + "datetime_utc": "2025-10-18 00:00:00", + "timestamp_ms": 1760745600000, + "market_cap": 0.0 + }, + "MAV_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 21911706.62724472 + }, + "MAVIA_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 4349810.405317102 + }, + "ME_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 67292619.84739074 + }, + "MEGA_market_cap": { + "datetime_utc": "1970-01-01 00:00:00", + "timestamp_ms": 0, + "market_cap": 0.0 + }, + "MELANIA_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 83583412.31513013 + }, + "MEME_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 15763.231214597714 + }, + "MERL_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 319327522.04054844 + }, + "MET_market_cap": 
{ + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 32895038.943529557 + }, + "MEW_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 135877052.97681496 + }, + "MINA_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 146020322.58194762 + }, + "MKR_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 0.0 + }, + "MNT_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 3696107.9048805744 + }, + "MON_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 9902800.530109076 + }, + "MOODENG_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 4397771.673453087 + }, + "MORPHO_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 969536559.8181872 + }, + "MOVE_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 157221083.89906973 + }, + "MYRO_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 15177548.156639775 + }, + "NEAR_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 2441358181.0741878 + }, + "NEO_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 342951027.6266851 + }, + "NIL_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 48840509.7250443 + }, + "NOT_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 82734.58508245819 + }, + "NTRN_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 23300375.381838094 + }, + "NXPC_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 69995236.7033223 + }, + "OGN_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 27580147.38301667 + }, + "OM_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 103697970.86476313 + }, + "OMNI_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 175528.067636899 + }, + "ONDO_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 1939142073.1196306 + }, + "OP_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 667649627.9813179 + }, + "ORBS_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 55029682.46756348 + }, + "ORDI_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 85185811.81412612 + }, + "OX_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 157634.6740919531 + }, + "PANDORA_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 3732534.93769119 + }, + "PAXG_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 1328106155.2408018 + }, + "PENDLE_market_cap": { + "datetime_utc": 
"2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 457437840.1263986 + }, + "PENGU_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 965538140.3936106 + }, + "PEOPLE_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 0.0 + }, + "PIXEL_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 17048.083686552345 + }, + "PNUT_market_cap": { + "datetime_utc": "2025-11-01 00:00:00", + "timestamp_ms": 1761955200000, + "market_cap": 41233.07758518409 + }, + "POL_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 0.0 + }, + "POLYX_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 84490920.71016563 + }, + "POPCAT_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 121914452.19243732 + }, + "PROMPT_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 13562717.140439501 + }, + "PROVE_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 116187315.47981949 }, "PUMP_market_cap": { - "datetime_utc": "2025-10-14 21:02:30", - "market_cap": 1454398647.593871 + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 1369591728.1563232 }, - "summary_last_updated_utc": "2025-10-16T00:16:09.640449+00:00" + "PURR_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 65827661.55315251 + }, + "PYTH_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 539864997.208216 + }, + "RDNT_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 17498638.40698302 + }, + "RENDER_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 1034536484.2636255 + }, + "REQ_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 91529547.52464016 + }, + "RESOLV_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 14454926.320095224 + }, + "REZ_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 35173756.487066366 + }, + "RLB_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 118321265.08017306 + }, + "RSR_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 289318202.78928894 + }, + "RUNE_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 273947984.4115854 + }, + "S_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 0.0 + }, + "SAGA_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 27144628.5109482 + }, + "SAND_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 0.0 + }, + "SCR_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 32156822.293870974 + }, + "SEI_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + 
"market_cap": 1035086503.8850844 + }, + "SHIA_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 197371.205124278 + }, + "SKY_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 1233438036.0145807 + }, + "SNX_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 308463989.3443749 + }, + "SOL_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 91566229600.40993 + }, + "SOPH_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 44742895.41423028 + }, + "SPX_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 679837069.7818836 + }, + "STBL_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 33660031.81704132 + }, + "STG_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 121555604.60671808 + }, + "STRAX_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 53632087.832194604 + }, + "STRK_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 480385612.1404703 + }, + "STX_market_cap": { + "datetime_utc": "2025-10-18 00:00:00", + "timestamp_ms": 1760745600000, + "market_cap": 398553.8861129834 + }, + "SUI_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 7582956962.382049 + }, + "SUPER_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 204583506.78983364 + }, + "SUSHI_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 86457467.04928584 + }, + "SYRUP_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 452516339.28053576 + }, + "TAO_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 21277.096113283824 + }, + "TIA_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 829003.4351000187 + }, + "TNSR_market_cap": { + "datetime_utc": "2025-11-03 00:00:00", + "timestamp_ms": 1762128000000, + "market_cap": 17857.56728532014 + }, + "TON_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 2403893.0332234623 + }, + "TRB_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 61551593.46789807 + }, + "TRUMP_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 22832.523567028387 + }, + "TRX_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 0.0 + }, + "TST_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 14465287.743795844 + }, + "TURBO_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 129449187.01694113 + }, + "UMA_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 87273593.42946458 + }, + "UNI_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 104652.2631986404 
+ }, + "UNIBOT_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 2078443.042996259 + }, + "USTC_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 0.0 + }, + "USUAL_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 28390048.92847462 + }, + "VINE_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 39341979.273077086 + }, + "VIRTUAL_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 937625889.5719905 + }, + "VVV_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 57616550.27696967 + }, + "W_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 130075.1315857268 + }, + "WCT_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 22144472.357325494 + }, + "WIF_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 110210.67851447531 + }, + "WLD_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 1643720220.8479671 + }, + "WLFI_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 3142696248.2576 + }, + "XAI_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 39683418.65722079 + }, + "XLM_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 8919546413.962923 + }, + "XPL_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 0.0 + }, + "XRP_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 139177666460.96902 + }, + "YGG_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 72289782.70350341 + }, + "YZY_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 49793986.29032182 + }, + "ZEC_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 6917445577.244665 + }, + "ZEN_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 35095.44531613883 + }, + "ZEREBRO_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 46689984.60393194 + }, + "ZETA_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 14461161.692064932 + }, + "ZK_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 373362446.8403737 + }, + "ZORA_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 272850912.0455221 + }, + "ZRO_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 159343859.3580481 + }, + "STABLECOINS_market_cap": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 275682631775.5859 + }, + "TOTAL_market_cap_daily": { + "datetime_utc": "2025-11-04 00:00:00", + "timestamp_ms": 1762214400000, + "market_cap": 3609757629625.9795 + }, + "summary_last_updated_utc": 
"2025-11-04T00:24:54.992739+00:00" } \ No newline at end of file diff --git a/_data/opened_positions.json b/_data/opened_positions.json new file mode 100644 index 0000000..254e134 --- /dev/null +++ b/_data/opened_positions.json @@ -0,0 +1,11 @@ +{ + "copy_trader_eth_ETH": { + "strategy": "copy_trader_eth", + "coin": "ETH", + "side": "long", + "open_time_utc": "2025-11-02T20:35:02.988272+00:00", + "open_price": 3854.9, + "amount": 0.0055, + "leverage": 3 + } +} \ No newline at end of file diff --git a/_data/strategies.json b/_data/strategies.json index e4ad53e..8ea4a3a 100644 --- a/_data/strategies.json +++ b/_data/strategies.json @@ -1,12 +1,11 @@ { - "sma_cross_eth_5m": { - "enabled": true, - "script": "strategy_runner.py", + "sma_cross_1": { + "enabled": false, "class": "strategies.ma_cross_strategy.MaCrossStrategy", "agent": "scalper_agent", "parameters": { "coin": "ETH", - "timeframe": "1m", + "timeframe": "15m", "short_ma": 7, "long_ma": 44, "size": 0.0055, @@ -14,19 +13,39 @@ "leverage_short": 5 } }, - "sma_125d_btc": { - "enabled": true, - "script": "strategy_runner.py", + "sma_44d_btc": { + "enabled": false, "class": "strategies.single_sma_strategy.SingleSmaStrategy", - "agent": "swing_agent", "parameters": { + "agent": "swing", "coin": "BTC", "timeframe": "1d", "sma_period": 44, "size": 0.0001, - "leverage_long": 2, + "leverage_long": 3, "leverage_short": 1 } + }, + "copy_trader_eth": { + "enabled": true, + "is_event_driven": true, + "class": "strategies.copy_trader_strategy.CopyTraderStrategy", + "parameters": { + "agent": "scalper", + "target_address": "0x32885a6adac4375858E6edC092EfDDb0Ef46484C", + "coins_to_copy": { + "ETH": { + "size": 0.0055, + "leverage_long": 3, + "leverage_short": 3 + }, + "BTC": { + "size": 0.0002, + "leverage_long": 1, + "leverage_short": 1 + } + } + } } } diff --git a/_data/strategy_state_copy_trader_eth.json b/_data/strategy_state_copy_trader_eth.json new file mode 100644 index 0000000..003ccf3 --- /dev/null +++ b/_data/strategy_state_copy_trader_eth.json @@ -0,0 +1,7 @@ +{ + "ETH": { + "side": "long", + "size": 0.018, + "entry": 3864.2 + } +} \ No newline at end of file diff --git a/_data/strategy_status_copy_trader_eth.json b/_data/strategy_status_copy_trader_eth.json new file mode 100644 index 0000000..c75c89a --- /dev/null +++ b/_data/strategy_status_copy_trader_eth.json @@ -0,0 +1,7 @@ +{ + "strategy_name": "copy_trader_eth", + "current_signal": "WAIT", + "last_signal_change_utc": null, + "signal_price": null, + "last_checked_utc": "2025-11-02T09:55:08.460168+00:00" +} \ No newline at end of file diff --git a/_data/strategy_status_ma_cross_btc.json b/_data/strategy_status_ma_cross_btc.json deleted file mode 100644 index 7882e85..0000000 --- a/_data/strategy_status_ma_cross_btc.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "strategy_name": "ma_cross_btc", - "current_signal": "HOLD", - "last_signal_change_utc": "2025-10-12T17:00:00+00:00", - "signal_price": 114286.0, - "last_checked_utc": "2025-10-15T11:48:55.092260+00:00" -} \ No newline at end of file diff --git a/_data/strategy_status_sma_125d_btc.json b/_data/strategy_status_sma_125d_btc.json deleted file mode 100644 index 4932ea0..0000000 --- a/_data/strategy_status_sma_125d_btc.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "strategy_name": "sma_125d_btc", - "current_signal": "SELL", - "last_signal_change_utc": "2025-10-14T00:00:00+00:00", - "signal_price": 113026.0, - "last_checked_utc": "2025-10-16T10:42:03.203292+00:00" -} \ No newline at end of file diff --git a/_data/strategy_status_sma_125d_eth.json 
b/_data/strategy_status_sma_125d_eth.json deleted file mode 100644 index 5c77164..0000000 --- a/_data/strategy_status_sma_125d_eth.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "strategy_name": "sma_125d_eth", - "current_signal": "BUY", - "last_signal_change_utc": "2025-08-26T00:00:00+00:00", - "signal_price": 4600.63, - "last_checked_utc": "2025-10-15T17:35:17.663159+00:00" -} \ No newline at end of file diff --git a/_data/strategy_status_sma_44d_btc.json b/_data/strategy_status_sma_44d_btc.json deleted file mode 100644 index f7966e0..0000000 --- a/_data/strategy_status_sma_44d_btc.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "strategy_name": "sma_44d_btc", - "current_signal": "SELL", - "last_signal_change_utc": "2025-10-14T00:00:00+00:00", - "signal_price": 113026.0, - "last_checked_utc": "2025-10-16T10:42:03.202977+00:00" -} \ No newline at end of file diff --git a/_data/strategy_status_sma_5m_eth.json b/_data/strategy_status_sma_5m_eth.json deleted file mode 100644 index e136da7..0000000 --- a/_data/strategy_status_sma_5m_eth.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "strategy_name": "sma_5m_eth", - "current_signal": "SELL", - "last_signal_change_utc": "2025-10-15T17:30:00+00:00", - "signal_price": 3937.5, - "last_checked_utc": "2025-10-15T17:35:05.035566+00:00" -} \ No newline at end of file diff --git a/_data/strategy_status_sma_cross.json b/_data/strategy_status_sma_cross.json deleted file mode 100644 index f51a39e..0000000 --- a/_data/strategy_status_sma_cross.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "strategy_name": "sma_cross", - "current_signal": "SELL", - "last_signal_change_utc": "2025-10-15T11:45:00+00:00", - "signal_price": 111957.0, - "last_checked_utc": "2025-10-15T12:10:05.048434+00:00" -} \ No newline at end of file diff --git a/_data/strategy_status_sma_cross_1.json b/_data/strategy_status_sma_cross_1.json deleted file mode 100644 index e9a5b44..0000000 --- a/_data/strategy_status_sma_cross_1.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "strategy_name": "sma_cross_1", - "current_signal": "FLAT", - "last_signal_change_utc": "2025-10-18T20:22:00+00:00", - "signal_price": 3893.9, - "last_checked_utc": "2025-10-18T20:30:05.021192+00:00" -} \ No newline at end of file diff --git a/_data/strategy_status_sma_cross_2.json b/_data/strategy_status_sma_cross_2.json index a4b5055..0b4e967 100644 --- a/_data/strategy_status_sma_cross_2.json +++ b/_data/strategy_status_sma_cross_2.json @@ -1,7 +1,7 @@ { "strategy_name": "sma_cross_2", "current_signal": "SELL", - "last_signal_change_utc": "2025-10-20T00:00:00+00:00", - "signal_price": 110811.0, - "last_checked_utc": "2025-10-20T18:45:51.578502+00:00" + "last_signal_change_utc": "2025-10-27T00:00:00+00:00", + "signal_price": 114111.0, + "last_checked_utc": "2025-11-09T21:06:17.671443+00:00" } \ No newline at end of file diff --git a/_data/strategy_status_sma_cross_eth_5m.json b/_data/strategy_status_sma_cross_eth_5m.json deleted file mode 100644 index 58400d0..0000000 --- a/_data/strategy_status_sma_cross_eth_5m.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "strategy_name": "sma_cross_eth_5m", - "current_signal": "SELL", - "last_signal_change_utc": "2025-10-15T11:45:00+00:00", - "signal_price": 4106.1, - "last_checked_utc": "2025-10-15T12:05:05.022308+00:00" -} \ No newline at end of file diff --git a/_data/wallets_info.json b/_data/wallets_info.json new file mode 100644 index 0000000..222a8f6 --- /dev/null +++ b/_data/wallets_info.json @@ -0,0 +1,290 @@ +{ + "Whale 1 (BTC Maxi)": { + "address": "0xb83de012dba672c76a7dbbbf3e459cb59d7d6e36", + "core_state": { + 
"raw_state": { + "marginSummary": { + "accountValue": "30018881.1193690002", + "totalNtlPos": "182930683.6996490061", + "totalRawUsd": "212949564.8190180063", + "totalMarginUsed": "22969943.9848450013" + }, + "crossMarginSummary": { + "accountValue": "30018881.1193690002", + "totalNtlPos": "182930683.6996490061", + "totalRawUsd": "212949564.8190180063", + "totalMarginUsed": "22969943.9848450013" + }, + "crossMaintenanceMarginUsed": "5420634.4984849999", + "withdrawable": "7043396.1885489998", + "assetPositions": [ + { + "type": "oneWay", + "position": { + "coin": "BTC", + "szi": "-546.94441", + "leverage": { + "type": "cross", + "value": 10 + }, + "entryPx": "115183.2", + "positionValue": "62795781.6009199992", + "unrealizedPnl": "203045.067519", + "returnOnEquity": "0.0322299761", + "liquidationPx": "159230.7089577085", + "marginUsed": "6279578.1600919999", + "maxLeverage": 40, + "cumFunding": { + "allTime": "-6923407.0911370004", + "sinceOpen": "-6923407.0970780002", + "sinceChange": "-1574.188052" + } + } + }, + { + "type": "oneWay", + "position": { + "coin": "ETH", + "szi": "-13938.989", + "leverage": { + "type": "cross", + "value": 10 + }, + "entryPx": "4106.64", + "positionValue": "58064252.5784000009", + "unrealizedPnl": "-821803.895073", + "returnOnEquity": "-0.1435654683", + "liquidationPx": "5895.7059682083", + "marginUsed": "5806425.2578400001", + "maxLeverage": 25, + "cumFunding": { + "allTime": "-6610045.8844170002", + "sinceOpen": "-6610045.8844170002", + "sinceChange": "-730.403023" + } + } + }, + { + "type": "oneWay", + "position": { + "coin": "SOL", + "szi": "-75080.68", + "leverage": { + "type": "cross", + "value": 10 + }, + "entryPx": "201.3063", + "positionValue": "14975592.4328000005", + "unrealizedPnl": "138627.573942", + "returnOnEquity": "0.0917199656", + "liquidationPx": "519.0933515657", + "marginUsed": "1497559.2432800001", + "maxLeverage": 20, + "cumFunding": { + "allTime": "-792893.154387", + "sinceOpen": "-922.301401", + "sinceChange": "-187.682929" + } + } + }, + { + "type": "oneWay", + "position": { + "coin": "DOGE", + "szi": "-109217.0", + "leverage": { + "type": "cross", + "value": 10 + }, + "entryPx": "0.279959", + "positionValue": "22081.49306", + "unrealizedPnl": "8494.879599", + "returnOnEquity": "2.7782496288", + "liquidationPx": "213.2654356057", + "marginUsed": "2208.149306", + "maxLeverage": 10, + "cumFunding": { + "allTime": "-1875.469799", + "sinceOpen": "-1875.469799", + "sinceChange": "45.79339" + } + } + }, + { + "type": "oneWay", + "position": { + "coin": "INJ", + "szi": "-18747.2", + "leverage": { + "type": "cross", + "value": 3 + }, + "entryPx": "13.01496", + "positionValue": "162200.7744", + "unrealizedPnl": "81793.4435", + "returnOnEquity": "1.005680924", + "liquidationPx": "1208.3529290194", + "marginUsed": "54066.9248", + "maxLeverage": 10, + "cumFunding": { + "allTime": "-539.133533", + "sinceOpen": "-539.133533", + "sinceChange": "-7.367325" + } + } + }, + { + "type": "oneWay", + "position": { + "coin": "SUI", + "szi": "-376577.6", + "leverage": { + "type": "cross", + "value": 3 + }, + "entryPx": "3.85881", + "positionValue": "989495.3017599999", + "unrealizedPnl": "463648.956001", + "returnOnEquity": "0.9571980625", + "liquidationPx": "64.3045458208", + "marginUsed": "329831.767253", + "maxLeverage": 10, + "cumFunding": { + "allTime": "-45793.455728", + "sinceOpen": "-45793.450891", + "sinceChange": "-1233.875821" + } + } + }, + { + "type": "oneWay", + "position": { + "coin": "XRP", + "szi": "-39691.0", + "leverage": { + "type": 
"cross", + "value": 20 + }, + "entryPx": "2.468585", + "positionValue": "105486.7707", + "unrealizedPnl": "-7506.1484", + "returnOnEquity": "-1.5321699789", + "liquidationPx": "607.2856858464", + "marginUsed": "5274.338535", + "maxLeverage": 20, + "cumFunding": { + "allTime": "-2645.400002", + "sinceOpen": "-116.036833", + "sinceChange": "-116.036833" + } + } + }, + { + "type": "oneWay", + "position": { + "coin": "HYPE", + "szi": "-750315.16", + "leverage": { + "type": "cross", + "value": 5 + }, + "entryPx": "43.3419", + "positionValue": "34957933.6195600033", + "unrealizedPnl": "-2437823.0249080001", + "returnOnEquity": "-0.3748177636", + "liquidationPx": "76.3945326684", + "marginUsed": "6991586.7239119997", + "maxLeverage": 5, + "cumFunding": { + "allTime": "-1881584.4214250001", + "sinceOpen": "-1881584.4214250001", + "sinceChange": "-45247.838743" + } + } + }, + { + "type": "oneWay", + "position": { + "coin": "FARTCOIN", + "szi": "-4122236.7999999998", + "leverage": { + "type": "cross", + "value": 10 + }, + "entryPx": "0.80127", + "positionValue": "1681584.057824", + "unrealizedPnl": "1621478.3279619999", + "returnOnEquity": "4.9090151459", + "liquidationPx": "6.034656163", + "marginUsed": "168158.405782", + "maxLeverage": 10, + "cumFunding": { + "allTime": "-72941.395024", + "sinceOpen": "-51271.5204", + "sinceChange": "-6504.295598" + } + } + }, + { + "type": "oneWay", + "position": { + "coin": "PUMP", + "szi": "-1921732999.0", + "leverage": { + "type": "cross", + "value": 5 + }, + "entryPx": "0.005551", + "positionValue": "9176275.0702250004", + "unrealizedPnl": "1491738.24016", + "returnOnEquity": "0.6991640321", + "liquidationPx": "0.0166674064", + "marginUsed": "1835255.0140450001", + "maxLeverage": 10, + "cumFunding": { + "allTime": "-196004.534539", + "sinceOpen": "-196004.534539", + "sinceChange": "-9892.654861" + } + } + } + ], + "time": 1761595358385 + }, + "account_value": 30018881.119369, + "margin_used": 22969943.984845, + "margin_utilization": 0.765183215640378, + "available_margin": 7048937.134523999, + "total_position_value": 0.0, + "portfolio_leverage": 0.0 + }, + "open_orders": { + "raw_orders": [ + { + "coin": "WLFI", + "side": "B", + "limitPx": "0.10447", + "sz": "2624.0", + "oid": 194029229960, + "timestamp": 1760131688558, + "origSz": "12760.0", + "cloid": "0x00000000000000000000001261000016" + }, + { + "coin": "@166", + "side": "A", + "limitPx": "1.01", + "sz": "103038.77", + "oid": 174787748753, + "timestamp": 1758819420037, + "origSz": "3000000.0" + } + ] + }, + "account_metrics": { + "cumVlm": "2823125892.6900000572", + "nRequestsUsed": 1766294, + "nRequestsCap": 2823135892 + } + } +} \ No newline at end of file diff --git a/_data/wallets_to_track.json b/_data/wallets_to_track.json new file mode 100644 index 0000000..bcb906d --- /dev/null +++ b/_data/wallets_to_track.json @@ -0,0 +1,7 @@ +[ + { + "name": "Whale 1 (BTC Maxi)", + "address": "0xb83de012dba672c76a7dbbbf3e459cb59d7d6e36", + "tags": ["btc", "high_leverage"] + } +] diff --git a/position_logic/__init__.py b/position_logic/__init__.py new file mode 100644 index 0000000..c4127c9 --- /dev/null +++ b/position_logic/__init__.py @@ -0,0 +1,2 @@ +# This file can be empty. +# It tells Python that 'position_logic' is a directory containing modules. 
diff --git a/position_logic/base_logic.py b/position_logic/base_logic.py
new file mode 100644
index 0000000..a6cbe99
--- /dev/null
+++ b/position_logic/base_logic.py
@@ -0,0 +1,32 @@
+from abc import ABC, abstractmethod
+from typing import Optional
+import logging
+
+class BasePositionLogic(ABC):
+    """
+    Abstract base class for all strategy-specific position logic.
+    Defines the interface for how the PositionManager interacts with logic modules.
+    """
+    def __init__(self, strategy_name: str, send_order_callback, log_trade_callback):
+        self.strategy_name = strategy_name
+        self.send_order = send_order_callback
+        self.log_trade = log_trade_callback
+        logging.info(f"Initialized position logic for '{strategy_name}'")
+
+    @abstractmethod
+    def handle_signal(self, signal_data: dict, current_strategy_positions: dict) -> Optional[dict]:
+        """
+        The core logic method. This is called by the PositionManager when a
+        new signal arrives for this strategy.
+
+        Args:
+            signal_data: The full signal dictionary from the strategy.
+            current_strategy_positions: A dict of this strategy's current positions,
+                keyed by coin (e.g., {"BTC": {"side": "long", ...}}).
+
+        Returns:
+            A dictionary representing the new state for the *specific coin* in the
+            signal (e.g., {"side": "long", "size": 0.1}).
+            Return None to indicate the position for this coin should be closed/removed.
+        """
+        pass
diff --git a/position_logic/default_flip_logic.py b/position_logic/default_flip_logic.py
new file mode 100644
index 0000000..c8dfb9a
--- /dev/null
+++ b/position_logic/default_flip_logic.py
@@ -0,0 +1,84 @@
+import logging
+from typing import Optional
+from position_logic.base_logic import BasePositionLogic
+
+class DefaultFlipLogic(BasePositionLogic):
+    """
+    The standard "flip-on-signal" logic used by most simple strategies
+    (SMA, MA Cross, and even the per-coin Copy Trader signals).
+
+    - BUY signal: Closes any short, opens a long.
+    - SELL signal: Closes any long, opens a short.
+    - FLAT signal: Closes any open position.
+    """
+    def handle_signal(self, signal_data: dict, current_strategy_positions: dict) -> Optional[dict]:
+        """
+        Processes a BUY, SELL, or FLAT signal and issues the necessary orders
+        to flip or open a position.
+ """ + name = self.strategy_name + params = signal_data['config']['parameters'] + coin = signal_data['coin'] + desired_signal = signal_data['signal'] + signal_price = signal_data.get('signal_price', 0) + + size = params.get('size') + leverage_long = int(params.get('leverage_long', 2)) + leverage_short = int(params.get('leverage_short', 2)) + agent_name = signal_data['config'].get("agent", "default").lower() + + # --- This logic now correctly targets a specific coin --- + current_position = current_strategy_positions.get(coin) + new_position_state = None # Return None to close position + + if desired_signal == "BUY" or desired_signal == "INIT_BUY": + new_position_state = {"coin": coin, "side": "long", "size": size} + + if not current_position: + logging.warning(f"[{name}]-[{coin}] ACTION: Setting leverage to {leverage_long}x and opening LONG.") + self.send_order(agent_name, "update_leverage", coin, is_buy=True, size=leverage_long) + self.send_order(agent_name, "market_open", coin, is_buy=True, size=size) + self.log_trade(strategy=name, coin=coin, action="OPEN_LONG", price=signal_price, size=size, signal=desired_signal) + + elif current_position['side'] == 'short': + logging.warning(f"[{name}]-[{coin}] ACTION: Closing SHORT and opening LONG with {leverage_long}x leverage.") + self.send_order(agent_name, "update_leverage", coin, is_buy=True, size=leverage_long) + self.send_order(agent_name, "market_open", coin, is_buy=True, size=current_position['size'], reduce_only=True) + self.log_trade(strategy=name, coin=coin, action="CLOSE_SHORT", price=signal_price, size=current_position['size'], signal=desired_signal) + self.send_order(agent_name, "market_open", coin, is_buy=True, size=size) + self.log_trade(strategy=name, coin=coin, action="OPEN_LONG", price=signal_price, size=size, signal=desired_signal) + + else: # Already long, do nothing + logging.info(f"[{name}]-[{coin}] INFO: Already LONG, no action taken.") + new_position_state = current_position # State is unchanged + + elif desired_signal == "SELL" or desired_signal == "INIT_SELL": + new_position_state = {"coin": coin, "side": "short", "size": size} + + if not current_position: + logging.warning(f"[{name}]-[{coin}] ACTION: Setting leverage to {leverage_short}x and opening SHORT.") + self.send_order(agent_name, "update_leverage", coin, is_buy=False, size=leverage_short) + self.send_order(agent_name, "market_open", coin, is_buy=False, size=size) + self.log_trade(strategy=name, coin=coin, action="OPEN_SHORT", price=signal_price, size=size, signal=desired_signal) + + elif current_position['side'] == 'long': + logging.warning(f"[{name}]-[{coin}] ACTION: Closing LONG and opening SHORT with {leverage_short}x leverage.") + self.send_order(agent_name, "update_leverage", coin, is_buy=False, size=leverage_short) + self.send_order(agent_name, "market_open", coin, is_buy=False, size=current_position['size'], reduce_only=True) + self.log_trade(strategy=name, coin=coin, action="CLOSE_LONG", price=signal_price, size=current_position['size'], signal=desired_signal) + self.send_order(agent_name, "market_open", coin, is_buy=False, size=size) + self.log_trade(strategy=name, coin=coin, action="OPEN_SHORT", price=signal_price, size=size, signal=desired_signal) + + else: # Already short, do nothing + logging.info(f"[{name}]-[{coin}] INFO: Already SHORT, no action taken.") + new_position_state = current_position # State is unchanged + + elif desired_signal == "FLAT": + if current_position: + logging.warning(f"[{name}]-[{coin}] ACTION: Close 
{current_position['side']} position.")
+                is_buy = current_position['side'] == 'short'  # To close a short, we buy
+                self.send_order(agent_name, "market_open", coin, is_buy=is_buy, size=current_position['size'], reduce_only=True)
+                self.log_trade(strategy=name, coin=coin, action=f"CLOSE_{current_position['side'].upper()}", price=signal_price, size=current_position['size'], signal=desired_signal)
+                # new_position_state is already None, which will remove it
+
+        return new_position_state
diff --git a/review.md b/review.md
new file mode 100644
index 0000000..d0fe34c
--- /dev/null
+++ b/review.md
@@ -0,0 +1,140 @@
+# Project Review and Recommendations
+
+This review analyzes the current state of the automated trading bot project, proposes specific code improvements, and identifies files that appear to be unused, or that are one-off utilities which could be reorganized.
+
+The project is a well-structured, multi-process Python application for crypto trading. It has a clear separation of concerns between data fetching, strategy execution, and trade management. The use of `multiprocessing` and a centralized `main_app.py` orchestrator is a solid architectural choice.
+
+The following sections detail recommendations for improving configuration management, code structure, and robustness, along with a list of files recommended for cleanup.
+
+---
+
+## Proposed Code Changes
+
+### 1. Centralize Configuration
+
+- **Issue:** Key configuration variables like `WATCHED_COINS` and `required_timeframes` are hardcoded in `main_app.py`. This makes them difficult to change without modifying the source code.
+- **Proposal:**
+  - Create a central configuration file, e.g., `_data/config.json` (see the illustrative sketch in the appendix below).
+  - Move `WATCHED_COINS` and `required_timeframes` into this new file.
+  - Load this configuration in `main_app.py` at startup.
+- **Benefit:** Decouples configuration from code, making the application more flexible and easier to manage.
+
+### 2. Refactor `main_app.py` for Clarity
+
+- **Issue:** `main_app.py` is long and handles multiple responsibilities: process orchestration, dashboard rendering, and data reading.
+- **Proposal:**
+  - **Abstract Process Management:** The functions for running subprocesses (e.g., `run_live_candle_fetcher`, `run_resampler_job`) contain repetitive logic for logging, shutdown handling, and process looping. This could be abstracted into a generic `ProcessRunner` class (see the sketch in the appendix below).
+  - **Create a Dashboard Class:** The complex dashboard rendering logic could be moved into a separate `Dashboard` class to improve separation of concerns and make the main application loop cleaner.
+- **Benefit:** Improves code readability, reduces duplication, and makes the application easier to maintain and extend.
+
+### 3. Improve Project Structure
+
+- **Issue:** The root directory is cluttered with numerous Python scripts, making it difficult to distinguish between core application files, utility scripts, and old/example files.
+- **Proposal:**
+  - Create a `scripts/` directory and move all one-off utility and maintenance scripts into it.
+  - Consider creating a `src/` or `app/` directory to house the core application source code (`main_app.py`, `trade_executor.py`, etc.), separating it clearly from configuration, data, and documentation.
+- **Benefit:** A cleaner, more organized project structure that is easier for new developers to understand.
+
+### 4. Enhance Robustness and Error Handling
+
+- **Issue:** The agent loading in `trade_executor.py` relies on discovering environment variables by a naming convention (`_AGENT_PK`). This is clever, but it becomes brittle if environment variables are named incorrectly.
+- **Proposal:**
+  - Explicitly define the agent names and their corresponding environment variable keys in the proposed `_data/config.json` file (see the appendix below). The `trade_executor` would then load only the agents specified in the configuration.
+- **Benefit:** Makes agent configuration more explicit and less prone to errors from stray environment variables.
+
+---
+
+## Identified Unused/Utility Files
+
+The following files appear to be unused by the core application, obsolete, or one-off utilities. It is recommended to **move them to a `scripts/` directory**, or to **delete them** if they are obsolete.
+
+### Obsolete / Old Versions:
+- `data_fetcher_old.py`
+- `market_old.py`
+- `base_strategy.py` (the one in the root directory; the one in `strategies/` is used).
+
+### One-Off Utility Scripts (Recommend moving to `scripts/`):
+- `!migrate_to_sqlite.py`
+- `import_csv.py`
+- `del_market_cap_tables.py`
+- `fix_timestamps.py`
+- `list_coins.py`
+- `create_agent.py`
+
+### Examples / Unused Code:
+- `basic_ws.py` (appears to be an example file).
+- `backtester.py`
+- `strategy_sma_cross.py` (a strategy file in the root, not in the `strategies/` folder).
+- `strategy_template.py`
+
+### Standalone / Potentially Unused Core Files:
+The logic of the following files appears to already be integrated into the main multi-process application. They may be remnants of a previous architecture and are likely not needed as standalone scripts.
+- `address_monitor.py`
+- `position_monitor.py`
+- `trade_log.py`
+- `wallet_data.py`
+- `whale_tracker.py`
+
+### Data / Log Files (Recommend archiving or deleting):
+- `hyperliquid_wallet_data_*.json` (these appear to be backups or logs).
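+
+---
+
+## Appendix: Illustrative Sketches
+
+The sketches below exist only to make sections 1, 2, and 4 concrete. Every file name, key, class name, and parameter in them is an assumption, not a description of existing code.
+
+A minimal sketch of what the proposed `_data/config.json` could look like, assuming the coin list, timeframes, and per-agent environment variable keys are the values worth centralizing:
+
+```json
+{
+    "watched_coins": ["BTC", "ETH", "SOL"],
+    "required_timeframes": ["1m", "15m", "1h", "1d"],
+    "agents": {
+        "scalper": {"env_key": "SCALPER_AGENT_PK"},
+        "swing": {"env_key": "SWING_AGENT_PK"}
+    }
+}
+```
+
+`main_app.py` would load this once at startup and pass the values down; `trade_executor.py` would iterate over `agents` and read only the listed environment variables.
+
+And a minimal sketch of the generic `ProcessRunner` class proposed in section 2, assuming each job is a plain callable that runs its own loop:
+
+```python
+import logging
+import multiprocessing
+import time
+
+class ProcessRunner:
+    """Wraps the start/monitor/restart logic that is currently repeated
+    across the run_* subprocess functions in main_app.py (sketch only)."""
+
+    def __init__(self, name, target, args=(), restart_delay_s=5):
+        self.name = name                      # label used in log lines
+        self.target = target                  # callable run in the child process
+        self.args = args
+        self.restart_delay_s = restart_delay_s
+        self.process = None
+
+    def start(self):
+        """Spawn the child process."""
+        self.process = multiprocessing.Process(target=self.target, args=self.args, name=self.name)
+        self.process.start()
+        logging.info(f"[{self.name}] started (pid={self.process.pid})")
+
+    def ensure_alive(self):
+        """Restart the child if it died; call this from the main loop."""
+        if self.process is not None and not self.process.is_alive():
+            logging.warning(f"[{self.name}] exited (code {self.process.exitcode}); restarting in {self.restart_delay_s}s")
+            time.sleep(self.restart_delay_s)
+            self.start()
+
+    def stop(self, timeout_s=10):
+        """Terminate the child during shutdown."""
+        if self.process is not None and self.process.is_alive():
+            self.process.terminate()
+            self.process.join(timeout_s)
+```
+
+With this in place, the main loop would create one `ProcessRunner` per job and call `ensure_alive()` on each runner every cycle instead of repeating the monitor logic per function.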
From b8ad857ca4239c61a7af0e6b9a188241939759a9 Mon Sep 17 00:00:00 2001 From: DiTus Date: Mon, 10 Nov 2025 08:57:57 +0100 Subject: [PATCH 16/18] ignore --- .gitignore | 41 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 41 insertions(+) create mode 100644 .gitignore diff --git a/.gitignore b/.gitignore new file mode 100644 index 0000000..72bdfb1 --- /dev/null +++ b/.gitignore @@ -0,0 +1,41 @@ +# --- Secrets & Environment --- +# Ignore local environment variables +.env +# Ignore virtual environment folders +.venv/ +venv/ + +# --- Python --- +# Ignore cache files +__pycache__/ +*.py[cod] + +# --- Data & Logs --- +# Ignore all database files (db, write-ahead log, shared memory) +_data/*.db +_data/*.db-shm +_data/*.db-wal + +# Ignore all JSON files in the data folder +_data/*.json + +# Ignore all log files +_logs/ + +# --- SDK --- +# Ignore all contents of the sdk directory +sdk/ + +# --- Other --- +# Ignore custom agents directory +agents/ + +# Ignore Jekyll files +.nojekyll + +# --- Editor & OS Files --- +# Ignore VSCode, JetBrains, and macOS/Windows system files +.vscode/ +.idea/ +.DS_Store +Thumbs.db \ No newline at end of file From ac843b0f824e62821c12c5b351f60d7747605033 Mon Sep 17 00:00:00 2001 From: DiTus Date: Mon, 10 Nov 2025 08:59:07 +0100 Subject: [PATCH 17/18] sdk remove --- sdk/hyperliquid-python-sdk | 1 - 1 file changed, 1 deletion(-) delete mode 160000 sdk/hyperliquid-python-sdk diff --git a/sdk/hyperliquid-python-sdk b/sdk/hyperliquid-python-sdk deleted file mode 160000 index 64b252e..0000000 --- a/sdk/hyperliquid-python-sdk +++ /dev/null @@ -1 +0,0 @@ -Subproject commit 64b252e99d1cc211a5edc7346387fbbdae4cbdbc From bdd2d607cdb1d72b6c808891ed4bfa62f2b02b65 Mon Sep 17 00:00:00 2001 From: ditus Date: Mon, 10 Nov 2025 08:19:06 +0000 Subject: [PATCH 18/18] Delete _data/strategy_status_sma_cross_2.json --- _data/strategy_status_sma_cross_2.json | 7 ------- 1 file changed, 7 deletions(-) delete mode 100644 _data/strategy_status_sma_cross_2.json diff --git a/_data/strategy_status_sma_cross_2.json b/_data/strategy_status_sma_cross_2.json deleted file mode 100644 index 2484819..0000000 --- a/_data/strategy_status_sma_cross_2.json +++ /dev/null @@ -1,7 +0,0 @@ -{ - "strategy_name": "sma_cross_2", - "current_signal": "SELL", - "last_signal_change_utc": "2025-10-27T00:00:00+00:00", - "signal_price": 114111.0, - "last_checked_utc": "2025-11-09T21:07:17.687112+00:00" -} \ No newline at end of file