refactor: Standardize CLP Manager and Hedger modules & cleanup
- **clp_manager.py**: Renamed from `uniswap_manager.py`. Standardized logic for Uniswap V3 liquidity provision.
- **clp_hedger.py**: Renamed from `unified_hedger.py`. Consolidated hedging logic, including delta-calculation fixes, EAC (Edge Avoidance), and the fishing-order implementation.
- **Cleanup**: Removed the legacy `aerodrome` folder and tools.
- **Monitoring**: Added Telegram monitoring scripts.
- **Config**: Updated `.gitignore` to exclude market-data CSVs.
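For context on the delta-calculation fixes: `clp_hedger.py` itself is not part of this diff, but the quantity a CLP hedger must offset is the token0 the position holds at the current price. A minimal sketch, assuming the standard Uniswap V3 liquidity math (the function name and float arithmetic are illustrative, not the module's actual API):

```python
import math

def v3_token0_exposure(liquidity: float, price: float, lower: float, upper: float) -> float:
    """Token0 held by a V3 position at `price` (hypothetical helper).

    In range, amount0 = L * (sqrt(pb) - sqrt(p)) / (sqrt(p) * sqrt(pb));
    a short perp hedge would target roughly this amount.
    """
    sp, sa, sb = math.sqrt(price), math.sqrt(lower), math.sqrt(upper)
    if price <= lower:                        # below range: fully in token0
        return liquidity * (sb - sa) / (sa * sb)
    if price >= upper:                        # above range: fully converted to token1
        return 0.0
    return liquidity * (sb - sp) / (sp * sb)  # in range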
**florida/tools/debug_fills.py** (new file, 36 lines)

```python
import os
import sys
import json
from dotenv import load_dotenv
from hyperliquid.info import Info
from hyperliquid.utils import constants

# Load env
load_dotenv()
address = os.environ.get("MAIN_WALLET_ADDRESS")

if not address:
    print("No address found")
    sys.exit(1)

info = Info(constants.MAINNET_API_URL, skip_ws=True)

try:
    print(f"Fetching fills for {address}...")
    fills = info.user_fills(address)

    if fills:
        print(f"Found {len(fills)} fills. Inspecting first one:")
        print(json.dumps(fills[0], indent=2))

        # Check for closedPnl
        if 'closedPnl' in fills[0]:
            print("✅ 'closedPnl' field FOUND!")
        else:
            print("❌ 'closedPnl' field NOT FOUND.")
    else:
        print("No fills found.")

except Exception as e:
    print(f"Error: {e}")
```
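The script dumps the first raw fill precisely so the schema can be checked by eye: `debug_pnl_check.py` below indexes `fill['closedPnl']` directly and would raise a `KeyError` if the Hyperliquid API ever dropped that field.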
**florida/tools/debug_pnl_check.py** (new file, 74 lines)

```python
import os
import sys
import json
import time
from decimal import Decimal
from dotenv import load_dotenv
from hyperliquid.info import Info
from hyperliquid.utils import constants

# Load env
current_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(current_dir)
load_dotenv(os.path.join(current_dir, '.env'))

address = os.environ.get("MAIN_WALLET_ADDRESS")
if not address:
    print("No address found")
    sys.exit(1)

info = Info(constants.MAINNET_API_URL, skip_ws=True)

# Target start time: 2025-12-30 21:32:52 (from user JSON)
START_TIME_MS = 1767126772 * 1000
COIN = "BNB"

print("--- DEBUG PnL CHECK ---")
print(f"Address: {address}")
print(f"Coin: {COIN}")
print(f"Start Time: {START_TIME_MS}")

try:
    fills = info.user_fills(address)

    valid_fills = []
    total_closed_pnl = Decimal("0")
    total_fees = Decimal("0")

    print("\n--- FILLS FOUND ---")
    print(f"{'Time':<20} | {'Side':<5} | {'Sz':<8} | {'Px':<8} | {'Fee':<8} | {'ClosedPnL':<10}")
    print("-" * 80)

    for fill in fills:
        if fill['coin'] == COIN and fill['time'] >= START_TIME_MS:
            valid_fills.append(fill)

            fee = Decimal(str(fill['fee']))
            pnl = Decimal(str(fill['closedPnl']))

            total_closed_pnl += pnl
            total_fees += fee

            ts_str = time.strftime('%H:%M:%S', time.localtime(fill['time'] / 1000))
            print(f"{ts_str:<20} | {fill['side']:<5} | {fill['sz']:<8} | {fill['px']:<8} | {fee:<8.4f} | {pnl:<10.4f}")

    print("-" * 80)
    print(f"Count: {len(valid_fills)}")
    print(f"Sum Closed PnL (Gross): {total_closed_pnl:.4f}")
    print(f"Sum Fees: {total_fees:.4f}")

    net_realized = total_closed_pnl - total_fees
    print(f"NET REALIZED (Gross - Fees): {net_realized:.4f}")

    # Compare against the bot's JSON state
    json_path = os.path.join(current_dir, "PANCAKESWAP_BNB_status.json")
    if os.path.exists(json_path):
        with open(json_path, 'r') as f:
            data = json.load(f)
        last_pos = data[-1]
        print("\n--- JSON STATE ---")
        print(f"hedge_TotPnL: {last_pos.get('hedge_TotPnL')}")
        print(f"hedge_fees_paid: {last_pos.get('hedge_fees_paid')}")

except Exception as e:
    print(f"Error: {e}")
```
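The hard-coded `START_TIME_MS` is easy to sanity-check with the standard library; the epoch value matches the script's comment when read as local time at UTC+1:

```python
from datetime import datetime, timezone

# 1767126772 s -> 2025-12-30 20:32:52 UTC, i.e. 21:32:52 at UTC+1,
# consistent with the "Target start time" comment in the script.
print(datetime.fromtimestamp(1767126772, tz=timezone.utc))
```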
**florida/tools/fetch_real_position_data.py** (new file, 142 lines)

```python
import json
import os
import math
import sys
from decimal import Decimal, getcontext
from web3 import Web3
from web3.middleware import ExtraDataToPOAMiddleware
from eth_account import Account
from dotenv import load_dotenv

# Add project root to path
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))

from clp_config import CLP_PROFILES

# Load env
load_dotenv()

# Config for PancakeSwap
PROFILE = CLP_PROFILES["PANCAKESWAP_BNB"]
RPC_URL = os.environ.get(PROFILE["RPC_ENV_VAR"])
STATUS_FILE = "PANCAKESWAP_BNB_status.json"

# Minimal ABI for the NonfungiblePositionManager (NPM)
NPM_ABI = [
    {
        "inputs": [{"internalType": "uint256", "name": "tokenId", "type": "uint256"}],
        "name": "positions",
        "outputs": [
            {"internalType": "uint96", "name": "nonce", "type": "uint96"},
            {"internalType": "address", "name": "operator", "type": "address"},
            {"internalType": "address", "name": "token0", "type": "address"},
            {"internalType": "address", "name": "token1", "type": "address"},
            {"internalType": "uint24", "name": "fee", "type": "uint24"},
            {"internalType": "int24", "name": "tickLower", "type": "int24"},
            {"internalType": "int24", "name": "tickUpper", "type": "int24"},
            {"internalType": "uint128", "name": "liquidity", "type": "uint128"},
            {"internalType": "uint256", "name": "feeGrowthInside0LastX128", "type": "uint256"},
            {"internalType": "uint256", "name": "feeGrowthInside1LastX128", "type": "uint256"},
            {"internalType": "uint128", "name": "tokensOwed0", "type": "uint128"},
            {"internalType": "uint128", "name": "tokensOwed1", "type": "uint128"}
        ],
        "stateMutability": "view",
        "type": "function"
    }
]


def get_price_at_tick(tick):
    return 1.0001 ** tick


def fetch_and_fix():
    if not RPC_URL:
        print("❌ Missing RPC URL in .env")
        return

    print(f"Connecting to RPC: {RPC_URL}")
    w3 = Web3(Web3.HTTPProvider(RPC_URL))
    w3.middleware_onion.inject(ExtraDataToPOAMiddleware, layer=0)

    if not w3.is_connected():
        print("❌ Failed to connect to Web3")
        return

    npm = w3.eth.contract(address=PROFILE["NPM_ADDRESS"], abi=NPM_ABI)

    with open(STATUS_FILE, 'r') as f:
        data = json.load(f)

    updated_count = 0

    for entry in data:
        token_id = entry.get('token_id')
        status = entry.get('status')

        # Refresh every position to be safe, rather than only the suspect
        # ones; the user flagged token 6164702 specifically.
        print(f"🔍 Checking Token ID: {token_id} ({status})")

        try:
            pos = npm.functions.positions(token_id).call()
            # positions() tuple layout:
            # 0: nonce, 1: operator, 2: token0, 3: token1, 4: fee,
            # 5: tickLower, 6: tickUpper, 7: liquidity, ...

            tick_lower = pos[5]
            tick_upper = pos[6]
            liquidity = pos[7]

            # Calculate ranges
            price_lower = get_price_at_tick(tick_lower)
            price_upper = get_price_at_tick(tick_upper)

            # Format to 4 decimals
            new_lower = round(price_lower, 4)
            new_upper = round(price_upper, 4)

            old_lower = entry.get('range_lower', 0)
            old_upper = entry.get('range_upper', 0)

            # Check deviation
            if abs(new_lower - old_lower) > 0.1 or abs(new_upper - old_upper) > 0.1:
                print("   ⚠️ Mismatch Found!")
                print(f"      Old: {old_lower} - {old_upper}")
                print(f"      New: {new_lower} - {new_upper}")

                entry['range_lower'] = new_lower
                entry['range_upper'] = new_upper
                entry['liquidity'] = str(liquidity)

                # 'entry_price' may also look wrong (e.g. 844 against a
                # 862-869 range), but without the historical event logs it
                # cannot be reconstructed. For the bot's logic only
                # 'range_lower'/'range_upper' are critical (in_range checks);
                # 'entry_price' only affects the PnL estimate, so we update
                # the ranges only.

                updated_count += 1
            else:
                print("   ✅ Data looks solid.")

        except Exception as e:
            print(f"   ❌ Error fetching chain data: {e}")

    if updated_count > 0:
        with open(STATUS_FILE, 'w') as f:
            json.dump(data, f, indent=2)
        print(f"💾 Updated {updated_count} entries in {STATUS_FILE}")
    else:
        print("No updates needed.")


if __name__ == "__main__":
    fetch_and_fix()
```
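One caveat on `get_price_at_tick`: `1.0001 ** tick` is the raw token1-per-token0 ratio in smallest units, so pools whose tokens differ in ERC-20 decimals need a rescaling step. A hedged sketch (the decimals parameters are assumptions; when both sides use the same decimals, as is common for 18-decimal BSC pairs, the adjustment is a no-op and the script's raw formula already matches):

```python
def get_adjusted_price_at_tick(tick: int, decimals0: int, decimals1: int) -> float:
    # Rescale the raw smallest-unit ratio to a whole-token price.
    return (1.0001 ** tick) * 10 ** (decimals0 - decimals1)
```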
The commit also patches the existing `CandleRecorder` tool, giving it a more informative close handler and a reconnect loop:

```diff
@@ -90,7 +90,7 @@ class CandleRecorder:
         print(f"WebSocket Error: {error}")
 
     def on_close(self, ws, close_status_code, close_msg):
-        print("WebSocket Closed")
+        print(f"WebSocket Closed: {close_status_code} - {close_msg}")
 
     def on_open(self, ws):
         print("WebSocket Connected. Subscribing to allMids...")
@@ -105,17 +105,25 @@ class CandleRecorder:
         print(f"📂 Output: {self.output_file}")
         print("Press Ctrl+C to stop.")
 
-        # Start WS in separate thread? No, run_forever is blocking usually.
-        # But we need to handle Ctrl+C.
-        self.ws = websocket.WebSocketApp(
-            WS_URL,
-            on_open=self.on_open,
-            on_message=self.on_message,
-            on_error=self.on_error,
-            on_close=self.on_close
-        )
-
-        self.ws.run_forever()
+        while self.running:
+            try:
+                self.ws = websocket.WebSocketApp(
+                    WS_URL,
+                    on_open=self.on_open,
+                    on_message=self.on_message,
+                    on_error=self.on_error,
+                    on_close=self.on_close
+                )
+
+                # run_forever blocks until connection is lost
+                self.ws.run_forever(ping_interval=30, ping_timeout=10)
+
+            except Exception as e:
+                print(f"Critical Error in main loop: {e}")
+
+            if self.running:
+                print("Connection lost. Reconnecting in 5 seconds...")
+                time.sleep(5)
 
 def signal_handler(sig, frame):
     print("\nStopping recorder...")
```
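The new loop leans on websocket-client's built-in keepalive: `run_forever(ping_interval=30, ping_timeout=10)` pings the server every 30 seconds and tears the socket down if no pong arrives within 10, so a silently dead connection makes `run_forever` return and the 5-second retry kicks in.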
**florida/tools/record_raw_ticks.py** (new file, 207 lines)

```python
import argparse
import csv
import os
import time
import json
import threading
import signal
import sys
import websocket
from datetime import datetime

# Setup
PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
MARKET_DATA_DIR = os.path.join(PROJECT_ROOT, 'market_data')
WS_URL = "wss://api.hyperliquid.xyz/ws"


class MarketDataRecorder:
    def __init__(self, coin, file_prefix):
        self.coin = coin
        self.running = True
        self.ws = None

        # File paths
        self.book_file = f"{file_prefix}_book.csv"
        self.trades_file = f"{file_prefix}_trades.csv"

        # Buffers
        self.book_buffer = []
        self.trades_buffer = []
        self.buffer_limit = 10

        # Ensure dir exists
        if not os.path.exists(MARKET_DATA_DIR):
            os.makedirs(MARKET_DATA_DIR)

        # Init book CSV
        if not os.path.exists(self.book_file):
            with open(self.book_file, 'w', newline='') as f:
                writer = csv.writer(f)
                writer.writerow(['timestamp_ms', 'local_time', 'bid_px', 'bid_sz', 'ask_px', 'ask_sz', 'mid_price'])

        # Init trades CSV
        if not os.path.exists(self.trades_file):
            with open(self.trades_file, 'w', newline='') as f:
                writer = csv.writer(f)
                writer.writerow(['timestamp_ms', 'local_time', 'price', 'size', 'side', 'hash'])

    def on_message(self, ws, message):
        try:
            recv_ts = time.time()
            msg = json.loads(message)
            channel = msg.get('channel')
            data = msg.get('data', {})

            if channel == 'l2Book':
                self.process_book(data, recv_ts)
            elif channel == 'trades':
                self.process_trades(data, recv_ts)

        except Exception as e:
            print(f"[{datetime.now()}] Error processing: {e}")

    def process_book(self, data, recv_ts):
        if data.get('coin') != self.coin:
            return

        levels = data.get('levels', [])
        if levels and len(levels) >= 2:
            bids = levels[0]
            asks = levels[1]

            if bids and asks:
                # Hyperliquid L2 format: {px: float, sz: float, n: int}
                best_bid = bids[0]
                best_ask = asks[0]

                bid_px = float(best_bid['px'])
                bid_sz = float(best_bid['sz'])
                ask_px = float(best_ask['px'])
                ask_sz = float(best_ask['sz'])
                mid = (bid_px + ask_px) / 2

                row = [
                    int(recv_ts * 1000),
                    datetime.fromtimestamp(recv_ts).strftime('%H:%M:%S.%f')[:-3],
                    bid_px, bid_sz,
                    ask_px, ask_sz,
                    mid
                ]
                self.book_buffer.append(row)

                if len(self.book_buffer) >= self.buffer_limit:
                    self.flush_book()

    def process_trades(self, data, recv_ts):
        # Data is a list of trades
        for trade in data:
            if trade.get('coin') != self.coin:
                continue

            # Trade format: {coin, side, px, sz, time, hash}
            row = [
                int(trade.get('time', int(recv_ts * 1000))),
                datetime.fromtimestamp(trade.get('time', 0) / 1000 or recv_ts).strftime('%H:%M:%S.%f')[:-3],
                float(trade['px']),
                float(trade['sz']),
                trade['side'],
                trade.get('hash', '')
            ]
            self.trades_buffer.append(row)

            if len(self.trades_buffer) >= self.buffer_limit:
                self.flush_trades()

    def flush_book(self):
        try:
            with open(self.book_file, 'a', newline='') as f:
                writer = csv.writer(f)
                writer.writerows(self.book_buffer)
            self.book_buffer = []
        except Exception as e:
            print(f"Error writing book: {e}")

    def flush_trades(self):
        try:
            with open(self.trades_file, 'a', newline='') as f:
                writer = csv.writer(f)
                writer.writerows(self.trades_buffer)

            # Console feedback
            last_trade = self.trades_buffer[-1] if self.trades_buffer else "N/A"
            if last_trade != "N/A":
                print(f"[{datetime.now().strftime('%H:%M:%S')}] 🔫 Trade: {last_trade[2]} (x{last_trade[3]}) {last_trade[4]}")

            self.trades_buffer = []
        except Exception as e:
            print(f"Error writing trades: {e}")

    def on_open(self, ws):
        print(f"[{datetime.now()}] Connected! Subscribing to l2Book & trades for {self.coin}...")

        # Subscribe to book
        ws.send(json.dumps({
            "method": "subscribe",
            "subscription": {"type": "l2Book", "coin": self.coin}
        }))

        # Subscribe to trades
        ws.send(json.dumps({
            "method": "subscribe",
            "subscription": {"type": "trades", "coin": self.coin}
        }))

    def on_error(self, ws, error):
        print(f"WebSocket Error: {error}")

    def on_close(self, ws, close_status_code, close_msg):
        print(f"WebSocket Closed: {close_status_code}")
        self.flush_book()
        self.flush_trades()

    def start(self):
        print(f"🔴 RECORDING RAW DATA for {self.coin}")
        print(f"📘 Book Data: {self.book_file}")
        print(f"🔫 Trades Data: {self.trades_file}")
        print("Press Ctrl+C to stop.")

        while self.running:
            try:
                self.ws = websocket.WebSocketApp(
                    WS_URL,
                    on_open=self.on_open,
                    on_message=self.on_message,
                    on_error=self.on_error,
                    on_close=self.on_close
                )
                self.ws.run_forever(ping_interval=15, ping_timeout=5)
            except Exception as e:
                print(f"Critical error: {e}")

            if self.running:
                print("Reconnecting in 1s...")
                time.sleep(1)


def signal_handler(sig, frame):
    print("\nStopping recorder...")
    sys.exit(0)


if __name__ == "__main__":
    signal.signal(signal.SIGINT, signal_handler)

    parser = argparse.ArgumentParser(description="Record RAW Book & Trades from Hyperliquid")
    parser.add_argument("--coin", type=str, default="ETH", help="Coin symbol")
    parser.add_argument("--output", type=str, help="Base filename (will append _book.csv and _trades.csv)")

    args = parser.parse_args()

    # Generate filename prefix
    if args.output:
        # Strip extension if user provided one like "data.csv" -> "data"
        base = os.path.splitext(args.output)[0]
    else:
        date_str = datetime.now().strftime("%Y%m%d")
        base = os.path.join(MARKET_DATA_DIR, f"{args.coin}_raw_{date_str}")

    recorder = MarketDataRecorder(args.coin, base)
    recorder.start()
```
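Invoked as, e.g., `python florida/tools/record_raw_ticks.py --coin BNB`, the recorder writes `market_data/BNB_raw_<YYYYMMDD>_book.csv` and `..._trades.csv` (or a user-supplied `--output` prefix), flushing to disk every 10 rows per `buffer_limit`.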