added market caps

This commit is contained in:
2025-10-14 23:08:37 +02:00
parent 323a3f31de
commit bbfb549fbb
4 changed files with 425 additions and 27 deletions

View File

@ -18,11 +18,26 @@ COIN_LISTER_SCRIPT = "list_coins.py"
MARKET_FEEDER_SCRIPT = "market.py"
DATA_FETCHER_SCRIPT = "data_fetcher.py"
RESAMPLER_SCRIPT = "resampler.py"
MARKET_CAP_FETCHER_SCRIPT = "market_cap_fetcher.py"
STRATEGY_CONFIG_FILE = os.path.join("_data", "strategies.json")
PRICE_DATA_FILE = os.path.join("_data", "current_prices.json")
DB_PATH = os.path.join("_data", "market_data.db")
STATUS_FILE = os.path.join("_data", "fetcher_status.json")
LOGS_DIR = "_logs" # Directory to store logs from background processes
MARKET_CAP_SUMMARY_FILE = os.path.join("_data", "market_cap_data.json")
LOGS_DIR = "_logs"
def format_market_cap(mc_value):
    """Render a raw market-cap number as a short human-readable string.

    Values >= 1e12/1e9/1e6 are shown with a T/B/M suffix and two decimals
    (e.g. "$1.23B"); smaller values fall back to a comma-grouped dollar
    amount. Non-numeric input and 0 are shown as "N/A".
    """
    if not isinstance(mc_value, (int, float)) or mc_value == 0:
        return "N/A"
    # Largest matching threshold wins; check from trillions downward.
    for scale, suffix in (
        (1_000_000_000_000, "T"),
        (1_000_000_000, "B"),
        (1_000_000, "M"),
    ):
        if mc_value >= scale:
            return f"${mc_value / scale:.2f}{suffix}"
    return f"${mc_value:,.2f}"
def run_market_feeder():
@ -32,24 +47,28 @@ def run_market_feeder():
try:
with open(log_file, 'a') as f:
subprocess.run(
[sys.executable, MARKET_FEEDER_SCRIPT, "--log-level", "normal"],
[sys.executable, MARKET_FEEDER_SCRIPT, "--log-level", "off"],
check=True, stdout=f, stderr=subprocess.STDOUT
)
except (subprocess.CalledProcessError, Exception) as e:
logging.error(f"Market feeder script failed: {e}. Restarting...")
time.sleep(5)
with open(log_file, 'a') as f:
f.write(f"\n--- PROCESS ERROR at {datetime.now()} ---\n")
f.write(f"Market feeder script failed: {e}. Restarting...\n")
time.sleep(5)
def run_data_fetcher_job():
"""Defines the job for the data fetcher, redirecting output to a log file."""
log_file = os.path.join(LOGS_DIR, "data_fetcher.log")
logging.info(f"Scheduler starting data_fetcher.py task...")
try:
# NOTE(review): the next two "command = ..." assignments look like the
# old/new pair of a diff rendered without +/- markers; only one (the
# "--log-level off" variant) should exist in the real file — confirm.
command = [sys.executable, DATA_FETCHER_SCRIPT, "--coins"] + WATCHED_COINS + ["--days", "7", "--log-level", "normal"]
command = [sys.executable, DATA_FETCHER_SCRIPT, "--coins"] + WATCHED_COINS + ["--days", "7", "--log-level", "off"]
# Append a timestamped header, then stream the child's stdout+stderr
# into the same per-job log file.
with open(log_file, 'a') as f:
f.write(f"\n--- Starting data_fetcher.py job at {datetime.now()} ---\n")
subprocess.run(command, check=True, stdout=f, stderr=subprocess.STDOUT)
except Exception as e:
# Broad catch keeps the scheduler alive; the failure is recorded both
# via logging and in the job's own log file.
logging.error(f"Failed to run data_fetcher.py job: {e}")
with open(log_file, 'a') as f:
f.write(f"\n--- SCHEDULER ERROR at {datetime.now()} ---\n")
f.write(f"Failed to run data_fetcher.py job: {e}\n")
def data_fetcher_scheduler():
@ -65,13 +84,15 @@ def data_fetcher_scheduler():
def run_resampler_job():
"""Defines the job for the resampler, redirecting output to a log file."""
log_file = os.path.join(LOGS_DIR, "resampler.log")
logging.info(f"Scheduler starting resampler.py task...")
try:
# NOTE(review): the duplicated "command = ..." lines look like an
# unmarked diff old/new pair; the "--log-level off" variant appears to
# be the current one — confirm against the repository.
command = [sys.executable, RESAMPLER_SCRIPT, "--coins"] + WATCHED_COINS + ["--log-level", "normal"]
command = [sys.executable, RESAMPLER_SCRIPT, "--coins"] + WATCHED_COINS + ["--log-level", "off"]
# Timestamped header, then child stdout+stderr appended to the log.
with open(log_file, 'a') as f:
f.write(f"\n--- Starting resampler.py job at {datetime.now()} ---\n")
subprocess.run(command, check=True, stdout=f, stderr=subprocess.STDOUT)
except Exception as e:
# Failure is logged and also written to the job's log file so the
# scheduler loop can keep running.
logging.error(f"Failed to run resampler.py job: {e}")
with open(log_file, 'a') as f:
f.write(f"\n--- SCHEDULER ERROR at {datetime.now()} ---\n")
f.write(f"Failed to run resampler.py job: {e}\n")
def resampler_scheduler():
@ -84,26 +105,52 @@ def resampler_scheduler():
time.sleep(1)
def run_market_cap_fetcher_job():
    """Run the market-cap fetcher script once, appending its output to a log.

    A timestamped header is written before the run; any failure (including a
    non-zero exit, via check=True) is recorded in the same log file instead
    of propagating, so the calling scheduler loop keeps running.
    """
    log_path = os.path.join(LOGS_DIR, "market_cap_fetcher.log")
    cmd = [sys.executable, MARKET_CAP_FETCHER_SCRIPT,
           "--coins", *WATCHED_COINS,
           "--log-level", "off"]
    try:
        with open(log_path, 'a') as log:
            log.write(f"\n--- Starting {MARKET_CAP_FETCHER_SCRIPT} job at {datetime.now()} ---\n")
            subprocess.run(cmd, check=True, stdout=log, stderr=subprocess.STDOUT)
    except Exception as exc:
        with open(log_path, 'a') as log:
            log.write(f"\n--- SCHEDULER ERROR at {datetime.now()} ---\n")
            log.write(f"Failed to run {MARKET_CAP_FETCHER_SCRIPT} job: {exc}\n")
def market_cap_fetcher_scheduler():
    """Run the market-cap fetch job once per day at 00:15 UTC, forever.

    Intended as a daemon-process target: it registers the job with the
    `schedule` library and then polls pending jobs once a minute.
    """
    setup_logging('off', 'MarketCapScheduler')
    daily = schedule.every().day.at("00:15", "UTC")
    daily.do(run_market_cap_fetcher_job)
    while True:
        schedule.run_pending()
        time.sleep(60)
def run_strategy(strategy_name: str, config: dict):
"""Target function to run a strategy, redirecting its output to a log file."""
log_file = os.path.join(LOGS_DIR, f"strategy_{strategy_name}.log")
script_name = config['script']
params_str = json.dumps(config['parameters'])
# NOTE(review): the two "command = ..." lines appear to be an unmarked diff
# old/new pair ("--log-level normal" vs "off"); only one belongs in the
# actual file — confirm.
command = [sys.executable, script_name, "--name", strategy_name, "--params", params_str, "--log-level", "normal"]
command = [sys.executable, script_name, "--name", strategy_name, "--params", params_str, "--log-level", "off"]
# Restart loop: the strategy subprocess is relaunched indefinitely; each
# attempt appends a timestamped header to the strategy's own log file.
while True:
try:
with open(log_file, 'a') as f:
f.write(f"\n--- Starting strategy '{strategy_name}' at {datetime.now()} ---\n")
subprocess.run(command, check=True, stdout=f, stderr=subprocess.STDOUT)
except (subprocess.CalledProcessError, Exception) as e:
# NOTE(review): listing CalledProcessError alongside Exception is
# redundant — Exception already covers it.
logging.error(f"Strategy '{strategy_name}' failed: {e}. Restarting...")
time.sleep(10)
with open(log_file, 'a') as f:
f.write(f"\n--- PROCESS ERROR at {datetime.now()} ---\n")
f.write(f"Strategy '{strategy_name}' failed: {e}. Restarting...\n")
time.sleep(10)
class MainApp:
def __init__(self, coins_to_watch: list, processes: dict):
"""Initializes dashboard state for the given watch list and process map."""
# Coins whose prices and market caps the dashboard tracks.
self.watched_coins = coins_to_watch
# Latest values read back from the JSON data files (filled by readers).
self.prices = {}
self.market_caps = {}
# Human-readable summary of the most recent DB fetch status.
self.last_db_update_info = "Initializing..."
# Lines printed by the previous dashboard frame (used to move the
# cursor back up and overwrite in place).
self._lines_printed = 0
# Mapping of name -> process handle, used for the status display.
self.background_processes = processes
@ -118,6 +165,21 @@ class MainApp:
except (json.JSONDecodeError, IOError):
logging.debug("Could not read price file.")
def read_market_caps(self):
    """Refresh self.market_caps from the market cap summary JSON file.

    Missing file or unreadable JSON leaves the current values untouched;
    coins without a "<coin>_market_cap" entry are simply skipped.
    """
    if not os.path.exists(MARKET_CAP_SUMMARY_FILE):
        return
    try:
        with open(MARKET_CAP_SUMMARY_FILE, 'r', encoding='utf-8') as fh:
            summary = json.load(fh)
        # Each coin's data lives under a "<coin>_market_cap" key; keep
        # only the market_cap value itself.
        for symbol in self.watched_coins:
            key = f"{symbol}_market_cap"
            if key in summary:
                self.market_caps[symbol] = summary[key].get('market_cap')
    except (json.JSONDecodeError, IOError):
        logging.debug("Could not read market cap summary file.")
def get_overall_db_status(self):
"""Reads the fetcher status from the status file."""
if os.path.exists(STATUS_FILE):
@ -131,15 +193,14 @@ class MainApp:
dt_utc = datetime.fromisoformat(timestamp_utc_str.replace('Z', '+00:00')).replace(tzinfo=timezone.utc)
dt_local = dt_utc.astimezone(None)
# --- FIX: Manually format the UTC offset ---
offset = dt_local.utcoffset()
offset_hours = int(offset.total_seconds() / 3600)
sign = '+' if offset_hours >= 0 else ''
offset_str = f"(UTC{sign}{offset_hours})"
offset_str = f"UTC{sign}{offset_hours}"
timestamp_display = f"{dt_local.strftime('%Y-%m-%d %H:%M:%S')} {offset_str}"
else:
timestamp_display = "N/A"
self.last_db_update_info = f"{coin} at {timestamp_display} ({num_candles} candles)"
self.last_db_update_info = f"{coin} at {timestamp_display} | {num_candles} candles"
except (IOError, json.JSONDecodeError):
self.last_db_update_info = "Error reading status file."
@ -149,20 +210,28 @@ class MainApp:
self.process_status[name] = "Running" if process.is_alive() else "STOPPED"
def display_dashboard(self):
"""Displays a formatted table without blinking."""
if self._lines_printed > 0: print(f"\x1b[{self._lines_printed}A", end="")
"""Displays a formatted table without blinking by overwriting previous lines."""
if self._lines_printed > 0:
print(f"\x1b[{self._lines_printed}A", end="")
output_lines = ["--- Market Dashboard ---"]
table_width = 26
table_width = 44
output_lines.append("-" * table_width)
output_lines.append(f"{'#':<2} | {'Coin':<6} | {'Live Price':>10} |")
output_lines.append(f"{'#':<2} | {'Coin':<6} | {'Live Price':>10} | {'Market Cap':>15} |")
output_lines.append("-" * table_width)
for i, coin in enumerate(self.watched_coins, 1):
price = self.prices.get(coin, "Loading...")
output_lines.append(f"{i:<2} | {coin:<6} | {price:>10} |")
market_cap = self.market_caps.get(coin)
formatted_mc = format_market_cap(market_cap)
output_lines.append(f"{i:<2} | {coin:<6} | {price:>10} | {formatted_mc:>15} |")
output_lines.append("-" * table_width)
output_lines.append("DB Status:")
output_lines.append(f" Last update -> {self.last_db_update_info}")
status_prefix = "DB Status: Last update -> "
max_len = 80
status_message = f"{status_prefix}{self.last_db_update_info}"
if len(status_message) > max_len:
status_message = status_message[:max_len-3] + "..."
output_lines.append(status_message)
output_lines.append("--- Background Processes ---")
for name, status in self.process_status.items():
@ -170,6 +239,7 @@ class MainApp:
final_output = "\n".join(output_lines) + "\n\x1b[J"
print(final_output, end="")
self._lines_printed = len(output_lines)
sys.stdout.flush()
@ -177,6 +247,7 @@ class MainApp:
"""Main loop to read data, display dashboard, and check processes."""
while True:
self.read_prices()
self.read_market_caps()
self.get_overall_db_status()
self.check_process_status()
self.display_dashboard()
@ -186,7 +257,6 @@ class MainApp:
if __name__ == "__main__":
setup_logging('normal', 'MainApp')
# Create logs directory if it doesn't exist
if not os.path.exists(LOGS_DIR):
os.makedirs(LOGS_DIR)
@ -199,12 +269,11 @@ if __name__ == "__main__":
processes = {}
# Start Data Pipeline Processes
processes["Market Feeder"] = multiprocessing.Process(target=run_market_feeder, daemon=True)
processes["Data Fetcher"] = multiprocessing.Process(target=data_fetcher_scheduler, daemon=True)
processes["Resampler"] = multiprocessing.Process(target=resampler_scheduler, daemon=True)
processes["Market Cap Fetcher"] = multiprocessing.Process(target=market_cap_fetcher_scheduler, daemon=True)
# Start Strategy Processes based on config
try:
with open(STRATEGY_CONFIG_FILE, 'r') as f:
strategy_configs = json.load(f)
@ -215,7 +284,6 @@ if __name__ == "__main__":
except (FileNotFoundError, json.JSONDecodeError) as e:
logging.error(f"Could not load strategies from '{STRATEGY_CONFIG_FILE}': {e}")
# Launch all processes
for name, proc in processes.items():
logging.info(f"Starting process '{name}'...")
proc.start()