|
@@ -1,4 +1,6 @@
|
|
|
#!/usr/bin/env python3
|
|
|
+# MOVED TO src/trading/stats/ - this file is kept for reference
|
|
|
+# Use: from src.stats import TradingStats
|
|
|
|
|
|
"""
|
|
|
Trading Statistics Tracker (SQLite Version)
|
|
|
|
|
@@ -18,6 +21,7 @@ import numpy as np # Ensure numpy is imported as np
|
|
|
# 🆕 Import the migration runner
|
|
|
from src.migrations.migrate_db import run_migrations as run_db_migrations
|
|
|
from src.utils.token_display_formatter import get_formatter # Added import
|
|
|
+from src.config.config import Config
|
|
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
@@ -54,6 +58,8 @@ class TradingStats:
|
|
|
|
|
|
# 🆕 Purge old daily aggregated stats on startup
|
|
|
self.purge_old_daily_aggregated_stats()
|
|
|
+ # 🆕 Purge old balance history on startup
|
|
|
+ self.purge_old_balance_history()
|
|
|
|
|
|
def _dict_factory(self, cursor, row):
|
|
|
"""Convert SQLite rows to dictionaries."""
|
|
@@ -149,10 +155,9 @@ class TradingStats:
|
|
|
)
|
|
|
""",
|
|
|
"""
|
|
|
- CREATE TABLE IF NOT EXISTS daily_balances (
|
|
|
- date TEXT PRIMARY KEY,
|
|
|
- balance REAL NOT NULL,
|
|
|
- timestamp TEXT NOT NULL
|
|
|
+ CREATE TABLE IF NOT EXISTS balance_history (
|
|
|
+ timestamp TEXT PRIMARY KEY,
|
|
|
+ balance REAL NOT NULL
|
|
|
)
|
|
|
""",
|
|
|
"""
|
|
@@ -211,7 +216,14 @@ class TradingStats:
|
|
|
""",
|
|
|
"""
|
|
|
CREATE INDEX IF NOT EXISTS idx_trades_symbol_status ON trades (symbol, status);
|
|
|
+ """,
|
|
|
"""
|
|
|
+ CREATE TABLE IF NOT EXISTS daily_balances (
|
|
|
+ date TEXT PRIMARY KEY,
|
|
|
+ balance REAL NOT NULL,
|
|
|
+ timestamp TEXT NOT NULL
|
|
|
+ )
|
|
|
+ """,
|
|
|
]
|
|
|
# 🆕 Add new table creation queries
|
|
|
queries.extend([
|
|
@@ -231,7 +243,8 @@ class TradingStats:
|
|
|
first_cycle_closed_at TEXT,
|
|
|
last_cycle_closed_at TEXT,
|
|
|
total_cancelled_cycles INTEGER DEFAULT 0, -- Count of lifecycles that ended in 'cancelled'
|
|
|
- updated_at TEXT DEFAULT CURRENT_TIMESTAMP
|
|
|
+ updated_at TEXT DEFAULT CURRENT_TIMESTAMP,
|
|
|
+ total_duration_seconds INTEGER DEFAULT 0
|
|
|
)
|
|
|
""",
|
|
|
"""
|
|
@@ -251,6 +264,7 @@ class TradingStats:
|
|
|
])
|
|
|
for query in queries:
|
|
|
self._execute_query(query)
|
|
|
+
|
|
|
logger.info("SQLite tables ensured for TradingStats.")
|
|
|
|
|
|
def _initialize_metadata(self):
|
|
@@ -416,8 +430,15 @@ class TradingStats:
|
|
|
summary = self._fetchone_query(query)
|
|
|
|
|
|
# Add total volume
|
|
|
- volume_summary = self._fetchone_query("SELECT SUM(total_exit_volume) as total_volume FROM token_stats")
|
|
|
+ volume_summary = self._fetchone_query("SELECT SUM(total_entry_volume) as total_volume FROM token_stats")
|
|
|
total_trading_volume = volume_summary['total_volume'] if volume_summary and volume_summary['total_volume'] is not None else 0.0
|
|
|
+
|
|
|
+ # 🆕 Calculate Average Trade Duration
|
|
|
+ duration_summary = self._fetchone_query("SELECT SUM(total_duration_seconds) as total_seconds, SUM(total_completed_cycles) as total_cycles FROM token_stats")
|
|
|
+ avg_trade_duration_formatted = "N/A"
|
|
|
+ if duration_summary and duration_summary['total_cycles'] and duration_summary['total_cycles'] > 0:
|
|
|
+ avg_seconds = duration_summary['total_seconds'] / duration_summary['total_cycles']
|
|
|
+ avg_trade_duration_formatted = self._format_duration(avg_seconds)
|
|
|
|
|
|
# Get individual token performances for best/worst
|
|
|
all_token_perf_stats = self.get_token_performance()
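# Illustrative sketch (annotation, not part of the patch): the average-duration math
# introduced above, restated on toy token_stats rows. Dict literals stand in for the
# SUM(...) aggregation; the values are made up.
rows = [
    {"total_duration_seconds": 7200, "total_completed_cycles": 2},
    {"total_duration_seconds": 1800, "total_completed_cycles": 1},
]
total_seconds = sum(r["total_duration_seconds"] for r in rows)        # 9000
total_cycles = sum(r["total_completed_cycles"] for r in rows)         # 3
avg_seconds = total_seconds / total_cycles if total_cycles else 0.0   # 3000.0 -> "0h 50m 0s"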
|
|
@@ -452,6 +473,7 @@ class TradingStats:
|
|
|
'total_trading_volume': total_trading_volume,
|
|
|
'best_performing_token': {'name': best_token_name, 'pnl_percentage': best_token_pnl_pct},
|
|
|
'worst_performing_token': {'name': worst_token_name, 'pnl_percentage': worst_token_pnl_pct},
|
|
|
+ 'avg_trade_duration': avg_trade_duration_formatted,
|
|
|
}
|
|
|
|
|
|
total_completed_count = summary['total_cycles']
|
|
@@ -482,21 +504,26 @@ class TradingStats:
|
|
|
'total_trading_volume': total_trading_volume,
|
|
|
'best_performing_token': {'name': best_token_name, 'pnl_percentage': best_token_pnl_pct},
|
|
|
'worst_performing_token': {'name': worst_token_name, 'pnl_percentage': worst_token_pnl_pct},
|
|
|
+ 'avg_trade_duration': avg_trade_duration_formatted,
|
|
|
}
|
|
|
|
|
|
def get_risk_metrics(self) -> Dict[str, Any]:
|
|
|
"""Calculate risk-adjusted metrics from daily balances."""
|
|
|
+ # Get live max drawdown from metadata
|
|
|
+ max_drawdown_live_str = self._get_metadata('drawdown_max_drawdown_pct')
|
|
|
+ max_drawdown_live = float(max_drawdown_live_str) if max_drawdown_live_str else 0.0
|
|
|
+
|
|
|
daily_balances_data = self._fetch_query("SELECT balance FROM daily_balances ORDER BY date ASC")
|
|
|
|
|
|
if not daily_balances_data or len(daily_balances_data) < 2:
|
|
|
- return {'sharpe_ratio': 0.0, 'sortino_ratio': 0.0, 'max_drawdown': 0.0, 'volatility': 0.0, 'var_95': 0.0}
|
|
|
+ return {'sharpe_ratio': 0.0, 'sortino_ratio': 0.0, 'max_drawdown': 0.0, 'volatility': 0.0, 'var_95': 0.0, 'max_drawdown_live': max_drawdown_live}
|
|
|
|
|
|
balances = [entry['balance'] for entry in daily_balances_data]
|
|
|
returns = np.diff(balances) / balances[:-1] # Calculate daily returns
|
|
|
returns = returns[np.isfinite(returns)] # Remove NaNs or Infs if any balance was 0
|
|
|
|
|
|
if returns.size == 0:
|
|
|
- return {'sharpe_ratio': 0.0, 'sortino_ratio': 0.0, 'max_drawdown': 0.0, 'volatility': 0.0, 'var_95': 0.0}
|
|
|
+ return {'sharpe_ratio': 0.0, 'sortino_ratio': 0.0, 'max_drawdown': 0.0, 'volatility': 0.0, 'var_95': 0.0, 'max_drawdown_live': max_drawdown_live}
|
|
|
|
|
|
risk_free_rate_daily = (1 + 0.02)**(1/365) - 1 # Approx 2% annual risk-free rate, daily
|
|
|
|
|
@@ -510,14 +537,15 @@ class TradingStats:
|
|
|
cumulative_returns = np.cumprod(1 + returns)
|
|
|
peak = np.maximum.accumulate(cumulative_returns)
|
|
|
drawdown = (cumulative_returns - peak) / peak
|
|
|
- max_drawdown_pct = abs(np.min(drawdown) * 100) if drawdown.size > 0 else 0.0
|
|
|
+ max_drawdown_daily_pct = abs(np.min(drawdown) * 100) if drawdown.size > 0 else 0.0
|
|
|
|
|
|
volatility_pct = np.std(returns) * np.sqrt(365) * 100
|
|
|
var_95_pct = abs(np.percentile(returns, 5) * 100) if returns.size > 0 else 0.0
|
|
|
|
|
|
return {
|
|
|
'sharpe_ratio': sharpe_ratio, 'sortino_ratio': sortino_ratio,
|
|
|
- 'max_drawdown': max_drawdown_pct, 'volatility': volatility_pct, 'var_95': var_95_pct
|
|
|
+ 'max_drawdown': max_drawdown_daily_pct, 'volatility': volatility_pct,
|
|
|
+ 'var_95': var_95_pct, 'max_drawdown_live': max_drawdown_live
|
|
|
}
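# Illustrative sketch (annotation, not part of the patch): the daily-balance risk math
# on a toy series. The project's exact Sharpe/Sortino lines sit outside this hunk, so
# those two formulas below follow a common convention rather than this codebase.
import numpy as np

balances = np.array([1000.0, 1010.0, 995.0, 1020.0, 1015.0])
returns = np.diff(balances) / balances[:-1]                  # daily returns
rf_daily = (1 + 0.02) ** (1 / 365) - 1                       # ~2% annual risk-free rate, daily
excess = returns - rf_daily
sharpe = np.mean(excess) / np.std(returns) * np.sqrt(365) if np.std(returns) > 0 else 0.0
downside = returns[returns < rf_daily]
sortino = (np.mean(excess) / np.std(downside) * np.sqrt(365)) if downside.size > 0 and np.std(downside) > 0 else 0.0
cumulative = np.cumprod(1 + returns)
peak = np.maximum.accumulate(cumulative)
max_drawdown_daily_pct = abs(np.min((cumulative - peak) / peak) * 100)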
|
|
|
|
|
|
def get_comprehensive_stats(self, current_balance: Optional[float] = None) -> Dict[str, Any]:
|
|
@@ -587,13 +615,13 @@ class TradingStats:
|
|
|
# Performance Metrics
|
|
|
stats_text_parts.append(f"\n🏆 <b>Performance Metrics:</b>")
|
|
|
stats_text_parts.append(f"• Total Completed Trades: {basic['completed_trades']}")
|
|
|
- stats_text_parts.append(f"• Trading Volume (Exit Vol.): {formatter.format_price_with_symbol(perf.get('total_trading_volume', 0.0))}")
|
|
|
+ stats_text_parts.append(f"• Trading Volume (Entry Vol.): {formatter.format_price_with_symbol(perf.get('total_trading_volume', 0.0))}")
|
|
|
stats_text_parts.append(f"• Profit Factor: {perf['profit_factor']:.2f}")
|
|
|
stats_text_parts.append(f"• Expectancy: {formatter.format_price_with_symbol(perf['expectancy'])} (Value per trade)")
|
|
|
# Note for Expectancy Percentage: "[Info: Percentage representation requires further definition]" might be too verbose for typical display.
|
|
|
|
|
|
stats_text_parts.append(f"• Largest Winning Trade: {formatter.format_price_with_symbol(perf['largest_win'])} (Value)")
|
|
|
- stats_text_parts.append(f"• Largest Losing Trade: {formatter.format_price_with_symbol(perf['largest_loss'])} (Value)")
|
|
|
+ stats_text_parts.append(f"• Largest Losing Trade: {formatter.format_price_with_symbol(-perf['largest_loss'])} (Value)")
|
|
|
# Note for Largest Trade P&L %: Similar to expectancy, noting "[Info: P&L % for specific trades requires data enhancement]" in the bot message might be too much.
|
|
|
|
|
|
best_token_stats = perf.get('best_performing_token', {'name': 'N/A', 'pnl_percentage': 0.0})
|
|
@@ -601,8 +629,8 @@ class TradingStats:
|
|
|
stats_text_parts.append(f"• Best Performing Token: {best_token_stats['name']} ({best_token_stats['pnl_percentage']:+.2f}%)")
|
|
|
stats_text_parts.append(f"• Worst Performing Token: {worst_token_stats['name']} ({worst_token_stats['pnl_percentage']:+.2f}%)")
|
|
|
|
|
|
- stats_text_parts.append(f"• Average Trade Duration: N/A <i>(Data collection required)</i>")
|
|
|
- stats_text_parts.append(f"• Portfolio Max Drawdown: {risk['max_drawdown']:.2f}% <i>(Daily Balance based)</i>")
|
|
|
+ stats_text_parts.append(f"• Average Trade Duration: {perf.get('avg_trade_duration', 'N/A')}")
|
|
|
+ stats_text_parts.append(f"• Portfolio Max Drawdown: {risk.get('max_drawdown_live', 0.0):.2f}% <i>(Live)</i>")
|
|
|
# Future note: "[Info: Trading P&L specific drawdown analysis planned]"
|
|
|
|
|
|
# Session Info
|
|
@@ -629,7 +657,7 @@ class TradingStats:
|
|
|
token = record['token']
|
|
|
total_pnl = record.get('total_realized_pnl', 0.0)
|
|
|
# total_volume now refers to total_entry_volume from token_stats
|
|
|
- total_volume = record.get('total_exit_volume', 0.0)
|
|
|
+ total_volume = record.get('total_entry_volume', 0.0)
|
|
|
|
|
|
pnl_percentage = (total_pnl / total_volume * 100) if total_volume > 0 else 0.0
|
|
|
|
|
@@ -655,7 +683,7 @@ class TradingStats:
|
|
|
'total_pnl': total_pnl,
|
|
|
'pnl_percentage': pnl_percentage,
|
|
|
'completed_trades': total_completed_count,
|
|
|
- 'total_volume': total_volume, # This is total_exit_volume
|
|
|
+ 'total_volume': total_volume, # This is total_entry_volume
|
|
|
'win_rate': win_rate,
|
|
|
'total_wins': total_wins_count,
|
|
|
'total_losses': total_losses_count,
|
|
@@ -667,7 +695,9 @@ class TradingStats:
|
|
|
'avg_loss': avg_loss,
|
|
|
'first_cycle_closed_at': record.get('first_cycle_closed_at'),
|
|
|
'last_cycle_closed_at': record.get('last_cycle_closed_at'),
|
|
|
- 'total_cancelled_cycles': record.get('total_cancelled_cycles', 0)
|
|
|
+ 'total_cancelled': record.get('total_cancelled_cycles', 0),
|
|
|
+ 'total_duration_seconds': record.get('total_duration_seconds', 0),
|
|
|
+ 'avg_trade_duration': self._format_duration(record.get('total_duration_seconds', 0) / total_completed_count) if total_completed_count > 0 else "N/A"
|
|
|
}
|
|
|
return token_performance_map
|
|
|
|
|
@@ -709,7 +739,9 @@ class TradingStats:
|
|
|
'total_losses': token_agg_stats.get('losing_cycles',0),
|
|
|
'completed_entry_volume': token_agg_stats.get('total_entry_volume', 0.0),
|
|
|
'completed_exit_volume': token_agg_stats.get('total_exit_volume', 0.0),
|
|
|
- 'total_cancelled': token_agg_stats.get('total_cancelled_cycles', 0)
|
|
|
+ 'total_cancelled': token_agg_stats.get('total_cancelled_cycles', 0),
|
|
|
+ 'total_duration_seconds': token_agg_stats.get('total_duration_seconds', 0),
|
|
|
+ 'avg_trade_duration': self._format_duration(token_agg_stats.get('total_duration_seconds', 0) / token_agg_stats.get('total_completed_cycles', 0)) if token_agg_stats.get('total_completed_cycles', 0) > 0 else "N/A"
|
|
|
}
|
|
|
if perf_stats['completed_trades'] > 0:
|
|
|
perf_stats['win_rate'] = (perf_stats['total_wins'] / perf_stats['completed_trades'] * 100) if perf_stats['completed_trades'] > 0 else 0.0
|
|
@@ -719,14 +751,15 @@ class TradingStats:
|
|
|
perf_stats['avg_win'] = (sum_wins / perf_stats['total_wins']) if perf_stats['total_wins'] > 0 else 0.0
|
|
|
perf_stats['avg_loss'] = (sum_losses / perf_stats['total_losses']) if perf_stats['total_losses'] > 0 else 0.0
|
|
|
perf_stats['expectancy'] = (perf_stats['avg_win'] * (perf_stats['win_rate'] / 100)) - (perf_stats['avg_loss'] * (1 - (perf_stats['win_rate'] / 100)))
|
|
|
- if perf_stats['completed_exit_volume'] > 0:
|
|
|
- perf_stats['pnl_percentage'] = (perf_stats['total_pnl'] / perf_stats['completed_exit_volume'] * 100)
|
|
|
+ if perf_stats['completed_entry_volume'] > 0:
|
|
|
+ perf_stats['pnl_percentage'] = (perf_stats['total_pnl'] / perf_stats['completed_entry_volume'] * 100)
|
|
|
else: # No completed cycles for this token yet
|
|
|
perf_stats = {
|
|
|
'completed_trades': 0, 'total_pnl': 0.0, 'pnl_percentage': 0.0, 'win_rate': 0.0,
|
|
|
'profit_factor': 0.0, 'avg_win': 0.0, 'avg_loss': 0.0, 'largest_win': 0.0, 'largest_loss': 0.0,
|
|
|
'expectancy': 0.0, 'total_wins':0, 'total_losses':0,
|
|
|
- 'completed_entry_volume': 0.0, 'completed_exit_volume': 0.0, 'total_cancelled': 0
|
|
|
+ 'completed_entry_volume': 0.0, 'completed_exit_volume': 0.0, 'total_cancelled': 0,
|
|
|
+ 'total_duration_seconds': 0, 'avg_trade_duration': "N/A"
|
|
|
}
|
|
|
|
|
|
# Info about open positions for this token (raw trades, not cycles)
|
|
@@ -1468,326 +1501,79 @@ class TradingStats:
|
|
|
"""Get trades by status."""
|
|
|
query = "SELECT * FROM trades WHERE status = ? ORDER BY updated_at DESC LIMIT ?"
|
|
|
return self._fetch_query(query, (status, limit))
|
|
|
-
|
|
|
- def get_lifecycle_by_entry_order_id(self, entry_exchange_order_id: str, status: Optional[str] = None) -> Optional[Dict[str, Any]]:
|
|
|
- """Get a trade lifecycle by its entry_order_id (exchange ID) and optionally by status."""
|
|
|
- if status:
|
|
|
- query = "SELECT * FROM trades WHERE entry_order_id = ? AND status = ? LIMIT 1"
|
|
|
- params = (entry_exchange_order_id, status)
|
|
|
- else:
|
|
|
- query = "SELECT * FROM trades WHERE entry_order_id = ? LIMIT 1"
|
|
|
- params = (entry_exchange_order_id,)
|
|
|
- return self._fetchone_query(query, params)
|
|
|
-
|
|
|
- def get_lifecycle_by_sl_order_id(self, sl_exchange_order_id: str, status: str = 'position_opened') -> Optional[Dict[str, Any]]:
|
|
|
- """Get an active trade lifecycle by its stop_loss_order_id (exchange ID)."""
|
|
|
- query = "SELECT * FROM trades WHERE stop_loss_order_id = ? AND status = ? LIMIT 1"
|
|
|
- return self._fetchone_query(query, (sl_exchange_order_id, status))
|
|
|
-
|
|
|
- def get_lifecycle_by_tp_order_id(self, tp_exchange_order_id: str, status: str = 'position_opened') -> Optional[Dict[str, Any]]:
|
|
|
- """Get an active trade lifecycle by its take_profit_order_id (exchange ID)."""
|
|
|
- query = "SELECT * FROM trades WHERE take_profit_order_id = ? AND status = ? LIMIT 1"
|
|
|
- return self._fetchone_query(query, (tp_exchange_order_id, status))
|
|
|
-
|
|
|
- def get_pending_stop_loss_activations(self) -> List[Dict[str, Any]]:
|
|
|
- """Get open positions that need stop loss activation."""
|
|
|
- query = """
|
|
|
- SELECT * FROM trades
|
|
|
- WHERE status = 'position_opened'
|
|
|
- AND stop_loss_price IS NOT NULL
|
|
|
- AND stop_loss_order_id IS NULL
|
|
|
- ORDER BY updated_at ASC
|
|
|
- """
|
|
|
- return self._fetch_query(query)
|
|
|
-
|
|
|
- def cleanup_old_cancelled_trades(self, days_old: int = 7) -> int:
|
|
|
- """Clean up old cancelled trades (optional - for housekeeping)."""
|
|
|
- try:
|
|
|
- cutoff_date = (datetime.now(timezone.utc) - timedelta(days=days_old)).isoformat()
|
|
|
-
|
|
|
- # Count before deletion
|
|
|
- count_query = """
|
|
|
- SELECT COUNT(*) as count FROM trades
|
|
|
- WHERE status = 'cancelled' AND updated_at < ?
|
|
|
- """
|
|
|
- count_result = self._fetchone_query(count_query, (cutoff_date,))
|
|
|
- count_to_delete = count_result['count'] if count_result else 0
|
|
|
-
|
|
|
- if count_to_delete > 0:
|
|
|
- delete_query = """
|
|
|
- DELETE FROM trades
|
|
|
- WHERE status = 'cancelled' AND updated_at < ?
|
|
|
- """
|
|
|
- self._execute_query(delete_query, (cutoff_date,))
|
|
|
- logger.info(f"🧹 Cleaned up {count_to_delete} old cancelled trades (older than {days_old} days)")
|
|
|
-
|
|
|
- return count_to_delete
|
|
|
-
|
|
|
- except Exception as e:
|
|
|
- logger.error(f"❌ Error cleaning up old cancelled trades: {e}")
|
|
|
- return 0
|
|
|
-
|
|
|
- def confirm_position_with_exchange(self, symbol: str, exchange_position_size: float,
|
|
|
- exchange_open_orders: List[Dict]) -> bool:
|
|
|
- """🆕 PHASE 4: Confirm position status with exchange before updating status."""
|
|
|
- try:
|
|
|
- # Get current trade status
|
|
|
- current_trade = self.get_trade_by_symbol_and_status(symbol, 'position_opened')
|
|
|
-
|
|
|
- if not current_trade:
|
|
|
- return True # No open position to confirm
|
|
|
-
|
|
|
- lifecycle_id = current_trade['trade_lifecycle_id']
|
|
|
- has_open_orders = len([o for o in exchange_open_orders if o.get('symbol') == symbol]) > 0
|
|
|
-
|
|
|
- # Only close position if exchange confirms no position AND no pending orders
|
|
|
- if abs(exchange_position_size) < 1e-8 and not has_open_orders:
|
|
|
- # Calculate realized P&L based on position side
|
|
|
- position_side = current_trade['position_side']
|
|
|
- entry_price_db = current_trade['entry_price'] # entry_price from db
|
|
|
- # current_amount = current_trade['current_position_size'] # Not directly used for PNL calc here
|
|
|
-
|
|
|
- # For a closed position, we need to calculate final P&L
|
|
|
- # This would typically come from the closing trade, but for confirmation we estimate
|
|
|
- estimated_pnl = current_trade.get('realized_pnl', 0) # Use existing realized_pnl if any
|
|
|
-
|
|
|
- success = self.update_trade_position_closed(
|
|
|
- lifecycle_id,
|
|
|
- entry_price_db, # Using entry price from DB as estimate since position is confirmed closed
|
|
|
- estimated_pnl,
|
|
|
- "exchange_confirmed_closed"
|
|
|
- )
|
|
|
-
|
|
|
- if success:
|
|
|
- logger.info(f"✅ Confirmed position closed for {symbol} with exchange")
|
|
|
-
|
|
|
- return success
|
|
|
-
|
|
|
- return True # Position still exists on exchange, no update needed
|
|
|
-
|
|
|
- except Exception as e:
|
|
|
- logger.error(f"❌ Error confirming position with exchange: {e}")
|
|
|
- return False
|
|
|
-
|
|
|
- def update_trade_market_data(self,
|
|
|
- trade_lifecycle_id: str,
|
|
|
- unrealized_pnl: Optional[float] = None,
|
|
|
- mark_price: Optional[float] = None,
|
|
|
- current_position_size: Optional[float] = None,
|
|
|
- entry_price: Optional[float] = None,
|
|
|
- liquidation_price: Optional[float] = None,
|
|
|
- margin_used: Optional[float] = None,
|
|
|
- leverage: Optional[float] = None,
|
|
|
- position_value: Optional[float] = None,
|
|
|
- unrealized_pnl_percentage: Optional[float] = None) -> bool:
|
|
|
- """Update market-related data for an open trade lifecycle.
|
|
|
- Only updates fields for which a non-None value is provided.
|
|
|
- """
|
|
|
- try:
|
|
|
- updates = []
|
|
|
- params = []
|
|
|
-
|
|
|
- if unrealized_pnl is not None:
|
|
|
- updates.append("unrealized_pnl = ?")
|
|
|
- params.append(unrealized_pnl)
|
|
|
- if mark_price is not None:
|
|
|
- updates.append("mark_price = ?")
|
|
|
- params.append(mark_price)
|
|
|
- if current_position_size is not None:
|
|
|
- updates.append("current_position_size = ?")
|
|
|
- params.append(current_position_size)
|
|
|
- if entry_price is not None: # If exchange provides updated avg entry
|
|
|
- updates.append("entry_price = ?")
|
|
|
- params.append(entry_price)
|
|
|
- if liquidation_price is not None:
|
|
|
- updates.append("liquidation_price = ?")
|
|
|
- params.append(liquidation_price)
|
|
|
- if margin_used is not None:
|
|
|
- updates.append("margin_used = ?")
|
|
|
- params.append(margin_used)
|
|
|
- if leverage is not None:
|
|
|
- updates.append("leverage = ?")
|
|
|
- params.append(leverage)
|
|
|
- if position_value is not None:
|
|
|
- updates.append("position_value = ?")
|
|
|
- params.append(position_value)
|
|
|
- if unrealized_pnl_percentage is not None:
|
|
|
- updates.append("unrealized_pnl_percentage = ?")
|
|
|
- params.append(unrealized_pnl_percentage)
|
|
|
-
|
|
|
- if not updates:
|
|
|
- logger.debug(f"No market data fields provided to update for lifecycle {trade_lifecycle_id}.")
|
|
|
- return True # No update needed, not an error
|
|
|
-
|
|
|
- timestamp = datetime.now(timezone.utc).isoformat()
|
|
|
- updates.append("updated_at = ?")
|
|
|
- params.append(timestamp)
|
|
|
-
|
|
|
- set_clause = ", ".join(updates)
|
|
|
- query = f"""
|
|
|
- UPDATE trades
|
|
|
- SET {set_clause}
|
|
|
- WHERE trade_lifecycle_id = ? AND status = 'position_opened'
|
|
|
- """
|
|
|
- params.append(trade_lifecycle_id)
|
|
|
-
|
|
|
- # Use the class's own connection self.conn
|
|
|
- cursor = self.conn.cursor()
|
|
|
- cursor.execute(query, tuple(params))
|
|
|
- self.conn.commit()
|
|
|
- updated_rows = cursor.rowcount
|
|
|
-
|
|
|
- if updated_rows > 0:
|
|
|
- logger.debug(f"💹 Updated market data for lifecycle {trade_lifecycle_id}. Fields: {updates}")
|
|
|
- return True
|
|
|
- else:
|
|
|
- # This might happen if the lifecycle ID doesn't exist or status is not 'position_opened'
|
|
|
- # logger.warning(f"⚠️ No trade found or not in 'position_opened' state for lifecycle {trade_lifecycle_id} to update market data.")
|
|
|
- return False # Not necessarily an error
|
|
|
|
|
|
- except Exception as e:
|
|
|
- logger.error(f"❌ Error updating market data for trade lifecycle {trade_lifecycle_id}: {e}")
|
|
|
- return False
|
|
|
+ def _format_duration(self, seconds: float) -> str:
|
|
|
+ """Formats a duration in seconds into a human-readable string (e.g., 1h 25m 3s)."""
|
|
|
+ hours = int(seconds // 3600)
|
|
|
+ minutes = int((seconds % 3600) // 60)
|
|
|
+ remaining_seconds = int(seconds % 60)
|
|
|
+ return f"{hours}h {minutes}m {remaining_seconds}s"
|
|
|
|
|
|
# --- End Trade Lifecycle Management ---
|
|
|
|
|
|
- def get_daily_balance_record_count(self) -> int:
|
|
|
- """Get the total number of daily balance records."""
|
|
|
- row = self._fetchone_query("SELECT COUNT(*) as count FROM daily_balances")
|
|
|
+ def get_balance_history_record_count(self) -> int:
|
|
|
+ """Get the total number of balance history records."""
|
|
|
+ row = self._fetchone_query("SELECT COUNT(*) as count FROM balance_history")
|
|
|
return row['count'] if row and 'count' in row else 0
|
|
|
|
|
|
# 🆕 PHASE 5: AGGREGATION AND PURGING LOGIC
|
|
|
def _migrate_trade_to_aggregated_stats(self, trade_lifecycle_id: str):
|
|
|
"""Migrate a completed/cancelled trade's stats to aggregate tables and delete the original trade."""
|
|
|
- trade_data = self.get_trade_by_lifecycle_id(trade_lifecycle_id)
|
|
|
- if not trade_data:
|
|
|
- logger.error(f"Cannot migrate trade {trade_lifecycle_id}: Not found.")
|
|
|
- return
|
|
|
-
|
|
|
- status = trade_data.get('status')
|
|
|
- symbol = trade_data.get('symbol')
|
|
|
- token = symbol.split('/')[0] if symbol and '/' in symbol else symbol # Assuming symbol like BTC/USDT
|
|
|
- if not token:
|
|
|
- logger.error(f"Cannot migrate trade {trade_lifecycle_id}: Token could not be derived from symbol '{symbol}'.")
|
|
|
- return
|
|
|
-
|
|
|
- now_iso = datetime.now(timezone.utc).isoformat()
|
|
|
+ # TODO: implement migration of this trade's stats into the aggregate tables, then delete the original trade row
|
|
|
+ pass
|
|
|
|
|
|
+ def purge_old_daily_aggregated_stats(self, months_to_keep: int = 10):
|
|
|
+ """Purge records from daily_aggregated_stats older than a specified number of months."""
|
|
|
try:
|
|
|
- with self.conn: # Ensures atomicity for the operations below
|
|
|
- if status == 'position_closed':
|
|
|
- realized_pnl = trade_data.get('realized_pnl', 0.0)
|
|
|
- # Use entry value if available, otherwise value (amount * price at entry)
|
|
|
- entry_value = trade_data.get('value', 0.0) # 'value' is amount * price from initial trade record
|
|
|
- # For exit_value, we'd ideally have the value of the closing trade(s).
|
|
|
- # If the 'realized_pnl' is from the trade record, and 'entry_value' is entry, exit_value = entry_value + realized_pnl
|
|
|
- exit_value = entry_value + realized_pnl
|
|
|
- closed_at_str = trade_data.get('position_closed_at', now_iso)
|
|
|
- closed_at_dt = datetime.fromisoformat(closed_at_str)
|
|
|
- date_str = closed_at_dt.strftime('%Y-%m-%d')
|
|
|
-
|
|
|
- # Update token_stats
|
|
|
- token_upsert_query = """
|
|
|
- INSERT INTO token_stats (
|
|
|
- token, total_realized_pnl, total_completed_cycles, winning_cycles, losing_cycles,
|
|
|
- total_entry_volume, total_exit_volume, sum_of_winning_pnl, sum_of_losing_pnl,
|
|
|
- largest_winning_cycle_pnl, largest_losing_cycle_pnl,
|
|
|
- first_cycle_closed_at, last_cycle_closed_at, updated_at
|
|
|
- ) VALUES (?, ?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
|
- ON CONFLICT(token) DO UPDATE SET
|
|
|
- total_realized_pnl = total_realized_pnl + excluded.total_realized_pnl,
|
|
|
- total_completed_cycles = total_completed_cycles + 1,
|
|
|
- winning_cycles = winning_cycles + excluded.winning_cycles,
|
|
|
- losing_cycles = losing_cycles + excluded.losing_cycles,
|
|
|
- total_entry_volume = total_entry_volume + excluded.total_entry_volume,
|
|
|
- total_exit_volume = total_exit_volume + excluded.total_exit_volume,
|
|
|
- sum_of_winning_pnl = sum_of_winning_pnl + excluded.sum_of_winning_pnl,
|
|
|
- sum_of_losing_pnl = sum_of_losing_pnl + excluded.sum_of_losing_pnl,
|
|
|
- largest_winning_cycle_pnl = MAX(largest_winning_cycle_pnl, excluded.largest_winning_cycle_pnl),
|
|
|
- largest_losing_cycle_pnl = MAX(largest_losing_cycle_pnl, excluded.largest_losing_cycle_pnl),
|
|
|
- first_cycle_closed_at = MIN(first_cycle_closed_at, excluded.first_cycle_closed_at),
|
|
|
- last_cycle_closed_at = MAX(last_cycle_closed_at, excluded.last_cycle_closed_at),
|
|
|
- updated_at = excluded.updated_at
|
|
|
- """
|
|
|
- is_win = 1 if realized_pnl > 0 else 0
|
|
|
- is_loss = 1 if realized_pnl < 0 else 0
|
|
|
- win_pnl_contrib = realized_pnl if realized_pnl > 0 else 0.0
|
|
|
- loss_pnl_contrib = abs(realized_pnl) if realized_pnl < 0 else 0.0
|
|
|
-
|
|
|
- self._execute_query(token_upsert_query, (
|
|
|
- token, realized_pnl, is_win, is_loss, entry_value, exit_value,
|
|
|
- win_pnl_contrib, loss_pnl_contrib, win_pnl_contrib, loss_pnl_contrib,
|
|
|
- closed_at_str, closed_at_str, now_iso
|
|
|
- ))
|
|
|
-
|
|
|
- # Update daily_aggregated_stats
|
|
|
- daily_upsert_query = """
|
|
|
- INSERT INTO daily_aggregated_stats (
|
|
|
- date, token, realized_pnl, completed_cycles, entry_volume, exit_volume
|
|
|
- ) VALUES (?, ?, ?, 1, ?, ?)
|
|
|
- ON CONFLICT(date, token) DO UPDATE SET
|
|
|
- realized_pnl = realized_pnl + excluded.realized_pnl,
|
|
|
- completed_cycles = completed_cycles + 1,
|
|
|
- entry_volume = entry_volume + excluded.entry_volume,
|
|
|
- exit_volume = exit_volume + excluded.exit_volume
|
|
|
- """
|
|
|
- self._execute_query(daily_upsert_query, (
|
|
|
- date_str, token, realized_pnl, entry_value, exit_value
|
|
|
- ))
|
|
|
- logger.info(f"Aggregated stats for closed trade lifecycle {trade_lifecycle_id} ({token}). PNL: {realized_pnl:.2f}")
|
|
|
-
|
|
|
- elif status == 'cancelled':
|
|
|
- # Update token_stats for cancelled count
|
|
|
- cancelled_upsert_query = """
|
|
|
- INSERT INTO token_stats (token, total_cancelled_cycles, updated_at)
|
|
|
- VALUES (?, 1, ?)
|
|
|
- ON CONFLICT(token) DO UPDATE SET
|
|
|
- total_cancelled_cycles = total_cancelled_cycles + 1,
|
|
|
- updated_at = excluded.updated_at
|
|
|
- """
|
|
|
- self._execute_query(cancelled_upsert_query, (token, now_iso))
|
|
|
- logger.info(f"Incremented cancelled_cycles for {token} due to lifecycle {trade_lifecycle_id}.")
|
|
|
-
|
|
|
- # Delete the original trade from the 'trades' table
|
|
|
- self._execute_query("DELETE FROM trades WHERE trade_lifecycle_id = ?", (trade_lifecycle_id,))
|
|
|
- logger.info(f"Deleted trade lifecycle {trade_lifecycle_id} from trades table after aggregation.")
|
|
|
+ cutoff_date = datetime.now(timezone.utc).date() - timedelta(days=months_to_keep * 30)  # approximate: months treated as 30 days
|
|
|
+ cutoff_datetime_str = cutoff_date.isoformat()
|
|
|
+
|
|
|
+ query = "DELETE FROM daily_aggregated_stats WHERE date < ?"
|
|
|
+
|
|
|
+ with self.conn:
|
|
|
+ cursor = self.conn.cursor()
|
|
|
+ cursor.execute(query, (cutoff_datetime_str,))
|
|
|
+ rows_deleted = cursor.rowcount
|
|
|
+
|
|
|
+ if rows_deleted > 0:
|
|
|
+ logger.info(f"Purged {rows_deleted} old records from daily_aggregated_stats (older than {months_to_keep} months).")
|
|
|
+ else:
|
|
|
+ logger.debug(f"No old records found in daily_aggregated_stats to purge (older than {months_to_keep} months).")
|
|
|
|
|
|
except sqlite3.Error as e:
|
|
|
- logger.error(f"Database error migrating trade {trade_lifecycle_id} to aggregate stats: {e}", exc_info=True)
|
|
|
+ logger.error(f"Database error purging old daily_aggregated_stats: {e}", exc_info=True)
|
|
|
except Exception as e:
|
|
|
- logger.error(f"Unexpected error migrating trade {trade_lifecycle_id} to aggregate stats: {e}", exc_info=True)
|
|
|
+ logger.error(f"Unexpected error purging old daily_aggregated_stats: {e}", exc_info=True)
|
|
|
|
|
|
- def purge_old_daily_aggregated_stats(self, months_to_keep: int = 10):
|
|
|
- """Purge records from daily_aggregated_stats older than a specified number of months."""
|
|
|
- if months_to_keep <= 0:
|
|
|
- logger.info("Not purging daily_aggregated_stats as months_to_keep is not positive.")
|
|
|
+ def purge_old_balance_history(self):
|
|
|
+ """Purge records from balance_history older than the configured retention period."""
|
|
|
+ days_to_keep = Config.BALANCE_HISTORY_RETENTION_DAYS
|
|
|
+ if days_to_keep <= 0:
|
|
|
+ logger.info("Not purging balance_history as retention days is not positive.")
|
|
|
return
|
|
|
|
|
|
try:
|
|
|
- # Calculate the cutoff date
|
|
|
- # This is a bit simplified; for more precise month calculations, dateutil.relativedelta might be used
|
|
|
- # For SQLite, comparing YYYY-MM-DD strings works well.
|
|
|
- cutoff_date = datetime.now(timezone.utc).date() - timedelta(days=months_to_keep * 30) # Approximate
|
|
|
- cutoff_date_str = cutoff_date.strftime('%Y-%m-%d')
|
|
|
+ cutoff_date = datetime.now(timezone.utc).date() - timedelta(days=days_to_keep)
|
|
|
+ cutoff_datetime_str = cutoff_date.isoformat()
|
|
|
|
|
|
- query = "DELETE FROM daily_aggregated_stats WHERE date < ?"
|
|
|
+ query = "DELETE FROM balance_history WHERE timestamp < ?"
|
|
|
|
|
|
- # To count before deleting (optional, for logging)
|
|
|
- # count_query = "SELECT COUNT(*) as count FROM daily_aggregated_stats WHERE date < ?"
|
|
|
- # before_count_row = self._fetchone_query(count_query, (cutoff_date_str,))
|
|
|
- # num_to_delete = before_count_row['count'] if before_count_row else 0
|
|
|
-
|
|
|
with self.conn:
|
|
|
cursor = self.conn.cursor()
|
|
|
- cursor.execute(query, (cutoff_date_str,))
|
|
|
+ cursor.execute(query, (cutoff_datetime_str,))
|
|
|
rows_deleted = cursor.rowcount
|
|
|
|
|
|
if rows_deleted > 0:
|
|
|
- logger.info(f"Purged {rows_deleted} old records from daily_aggregated_stats (older than approx. {months_to_keep} months, before {cutoff_date_str}).")
|
|
|
+ logger.info(f"Purged {rows_deleted} old records from balance_history (older than {days_to_keep} days).")
|
|
|
else:
|
|
|
- logger.info(f"No old records found in daily_aggregated_stats to purge (older than approx. {months_to_keep} months, before {cutoff_date_str}).")
|
|
|
+ logger.debug(f"No old records found in balance_history to purge (older than {days_to_keep} days).")
|
|
|
|
|
|
except sqlite3.Error as e:
|
|
|
- logger.error(f"Database error purging old daily_aggregated_stats: {e}", exc_info=True)
|
|
|
+ logger.error(f"Database error purging old balance_history: {e}", exc_info=True)
|
|
|
except Exception as e:
|
|
|
- logger.error(f"Unexpected error purging old daily_aggregated_stats: {e}", exc_info=True)
|
|
|
+ logger.error(f"Unexpected error purging old balance_history: {e}", exc_info=True)
|
|
|
+
|
|
|
+ def get_daily_balance_record_count(self) -> int:
|
|
|
+ """Get the total number of daily balance records."""
|
|
|
+ row = self._fetchone_query("SELECT COUNT(*) as count FROM daily_balances")
|
|
|
+ return row['count'] if row and 'count' in row else 0
|