@@ -52,6 +52,9 @@ class TradingStats:
         self._create_tables() # CREATE IF NOT EXISTS will still be useful for first-time setup
         self._initialize_metadata() # Also potentially sets schema_version if DB was just created
 
+        # 🆕 Purge old daily aggregated stats on startup
+        self.purge_old_daily_aggregated_stats()
+
     def _dict_factory(self, cursor, row):
         """Convert SQLite rows to dictionaries."""
         d = {}
@@ -210,6 +213,42 @@ class TradingStats:
             CREATE INDEX IF NOT EXISTS idx_trades_symbol_status ON trades (symbol, status);
             """
         ]
+        # 🆕 Add new table creation queries
+        queries.extend([
+            """
+            CREATE TABLE IF NOT EXISTS token_stats (
+                token TEXT PRIMARY KEY,
+                total_realized_pnl REAL DEFAULT 0.0,
+                total_completed_cycles INTEGER DEFAULT 0,
+                winning_cycles INTEGER DEFAULT 0,
+                losing_cycles INTEGER DEFAULT 0,
+                total_entry_volume REAL DEFAULT 0.0, -- Sum of (amount * entry_price) for completed cycles
+                total_exit_volume REAL DEFAULT 0.0, -- Sum of (amount * exit_price) for completed cycles
+                sum_of_winning_pnl REAL DEFAULT 0.0,
+                sum_of_losing_pnl REAL DEFAULT 0.0, -- Stored as a positive value
+                largest_winning_cycle_pnl REAL DEFAULT 0.0,
+                largest_losing_cycle_pnl REAL DEFAULT 0.0, -- Stored as a positive value
+                first_cycle_closed_at TEXT,
+                last_cycle_closed_at TEXT,
+                total_cancelled_cycles INTEGER DEFAULT 0, -- Count of lifecycles that ended in 'cancelled'
+                updated_at TEXT DEFAULT CURRENT_TIMESTAMP
+            )
+            """,
+            """
+            CREATE TABLE IF NOT EXISTS daily_aggregated_stats (
+                date TEXT NOT NULL, -- YYYY-MM-DD
+                token TEXT NOT NULL, -- Specific token or a general identifier like '_OVERALL_'
+                realized_pnl REAL DEFAULT 0.0,
+                completed_cycles INTEGER DEFAULT 0,
+                entry_volume REAL DEFAULT 0.0,
+                exit_volume REAL DEFAULT 0.0,
+                PRIMARY KEY (date, token)
+            )
+            """,
+            """
+            CREATE INDEX IF NOT EXISTS idx_daily_stats_date_token ON daily_aggregated_stats (date, token);
+            """
+        ])
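+        # Illustrative read sketch for the new aggregate tables (the date/token
+        # values are assumed example data, not real records):
+        #   row = self._fetchone_query(
+        #       "SELECT realized_pnl, completed_cycles FROM daily_aggregated_stats "
+        #       "WHERE date = ? AND token = ?",
+        #       ('2024-01-15', 'BTC'))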
         for query in queries:
             self._execute_query(query)
         logger.info("SQLite tables ensured for TradingStats.")
@@ -306,185 +345,29 @@ class TradingStats:
             # logger.debug(f"No trade found for {symbol} with status {status}")
         return trade
 
-    def calculate_completed_trade_cycles(self) -> List[Dict[str, Any]]:
-        """
-        Calculate completed trade cycles (full position open to close) using FIFO method from DB trades.
-        Handles both long and short cycles. PNL is summed from individual trade records.
+    def get_basic_stats(self, current_balance: Optional[float] = None) -> Dict[str, Any]:
+        """Get basic trading statistics from DB, primarily using aggregated tables."""
+
+        # Get counts of open positions (trades that are not yet migrated)
+        open_positions_count = self._get_open_positions_count_from_db()
+
+        # Get overall aggregated stats from token_stats table
+        query_token_stats_summary = """
+            SELECT
+                SUM(total_realized_pnl) as total_pnl_from_cycles,
+                SUM(total_completed_cycles) as total_completed_cycles_sum,
+                MIN(first_cycle_closed_at) as overall_first_cycle_closed,
+                MAX(last_cycle_closed_at) as overall_last_cycle_closed
+            FROM token_stats
         """
-        completed_cycles = []
-        # symbol -> {
-        #   'open_legs': [{'side', 'amount_remaining', 'price', 'timestamp', 'value', 'pnl_contribution'}], # Holds fills of the current open leg
-        #   'cycle_trades_details': [trade_dict_from_db], # All trades part of the current forming cycle
-        #   'cycle_start_ts': timestamp_str,
-        #   'current_leg_type': 'long' | 'short' | None
-        # }
-        open_positions_data = defaultdict(lambda: {
-            'open_legs': [],
-            'cycle_trades_details': [],
-            'cycle_start_ts': None,
-            'current_leg_type': None
-        })
-
-        all_trades = self.get_all_trades() # Trades now include their 'pnl' contribution
-
-        for trade in all_trades:
-            symbol = trade['symbol']
-            side = trade['side'].lower() # Ensure lowercase
-            amount = trade['amount']
-            price = trade['price']
-            timestamp = trade['timestamp']
-            trade_pnl = trade.get('pnl', 0.0) # PNL from this specific fill
-
-            pos_data = open_positions_data[symbol]
-
-            current_trade_detail = {**trade} # Copy trade details
-
-            if pos_data['current_leg_type'] is None: # Starting a new potential cycle
-                pos_data['current_leg_type'] = 'long' if side == 'buy' else 'short'
-                pos_data['cycle_start_ts'] = timestamp
-                pos_data['open_legs'].append({
-                    'side': side, 'amount_remaining': amount, 'price': price,
-                    'timestamp': timestamp, 'value': amount * price,
-                    'pnl_contribution': trade_pnl # PNL of opening trade usually 0
-                })
-                pos_data['cycle_trades_details'] = [current_trade_detail]
-
-            elif (side == 'buy' and pos_data['current_leg_type'] == 'long') or \
-                 (side == 'sell' and pos_data['current_leg_type'] == 'short'):
-                # Increasing an existing long or short position
-                pos_data['open_legs'].append({
-                    'side': side, 'amount_remaining': amount, 'price': price,
-                    'timestamp': timestamp, 'value': amount * price,
-                    'pnl_contribution': trade_pnl
-                })
-                pos_data['cycle_trades_details'].append(current_trade_detail)
+        token_stats_summary = self._fetchone_query(query_token_stats_summary)
 
-            elif (side == 'sell' and pos_data['current_leg_type'] == 'long'): # Selling to reduce/close long
-                pos_data['cycle_trades_details'].append(current_trade_detail)
-                sell_amount_remaining = amount
-
-                while sell_amount_remaining > 0 and pos_data['open_legs']:
-                    oldest_leg_fill = pos_data['open_legs'][0] # FIFO
-
-                    match_amount = min(sell_amount_remaining, oldest_leg_fill['amount_remaining'])
-
-                    oldest_leg_fill['amount_remaining'] -= match_amount
-                    sell_amount_remaining -= match_amount
-
-                    if oldest_leg_fill['amount_remaining'] <= 1e-9:
-                        pos_data['open_legs'].pop(0)
-
-                if not pos_data['open_legs']: # Long cycle closed
-                    # Compile cycle
-                    cycle_pnl = sum(t.get('pnl', 0.0) for t in pos_data['cycle_trades_details'])
-
-                    cycle_buys = [t for t in pos_data['cycle_trades_details'] if t['side'] == 'buy']
-                    cycle_sells = [t for t in pos_data['cycle_trades_details'] if t['side'] == 'sell']
-                    total_amount_bought = sum(t['amount'] for t in cycle_buys)
-                    total_buy_value = sum(t['value'] for t in cycle_buys)
-                    total_amount_sold = sum(t['amount'] for t in cycle_sells) # Should match total_amount_bought
-                    total_sell_value = sum(t['value'] for t in cycle_sells)
-
-                    completed_cycle = {
-                        'symbol': symbol,
-                        'token': symbol.split('/')[0] if '/' in symbol else symbol,
-                        'cycle_start': pos_data['cycle_start_ts'],
-                        'cycle_end': timestamp, # End time is the timestamp of the closing trade
-                        'cycle_type': 'long',
-                        'buy_orders': len(cycle_buys),
-                        'sell_orders': len(cycle_sells),
-                        'total_orders': len(pos_data['cycle_trades_details']),
-                        'total_amount': total_amount_bought,
-                        'avg_entry_price': total_buy_value / total_amount_bought if total_amount_bought > 0 else 0,
-                        'avg_exit_price': total_sell_value / total_amount_sold if total_amount_sold > 0 else 0,
-                        'total_pnl': cycle_pnl,
-                        'buy_value': total_buy_value,
-                        'sell_value': total_sell_value,
-                        'cycle_trades': pos_data['cycle_trades_details'].copy()
-                    }
-                    completed_cycles.append(completed_cycle)
-
-                    # Reset for next cycle, potentially flip if sell_amount_remaining > 0
-                    pos_data['cycle_trades_details'] = []
-                    pos_data['cycle_start_ts'] = None
-                    pos_data['current_leg_type'] = None
-                    if sell_amount_remaining > 1e-9: # Flipped to short
-                        pos_data['current_leg_type'] = 'short'
-                        pos_data['cycle_start_ts'] = timestamp
-                        pos_data['open_legs'].append({
-                            'side': 'sell', 'amount_remaining': sell_amount_remaining, 'price': price,
-                            'timestamp': timestamp, 'value': sell_amount_remaining * price,
-                            'pnl_contribution': trade_pnl # PNL of this fill if it was part of closing previous and opening this
-                        })
-                        pos_data['cycle_trades_details'] = [current_trade_detail] # Start new details list with current trade
-
-            elif (side == 'buy' and pos_data['current_leg_type'] == 'short'): # Buying to reduce/close short
-                pos_data['cycle_trades_details'].append(current_trade_detail)
-                buy_amount_remaining = amount
-
-                while buy_amount_remaining > 0 and pos_data['open_legs']:
-                    oldest_leg_fill = pos_data['open_legs'][0] # FIFO
-
-                    match_amount = min(buy_amount_remaining, oldest_leg_fill['amount_remaining'])
-
-                    oldest_leg_fill['amount_remaining'] -= match_amount
-                    buy_amount_remaining -= match_amount
-
-                    if oldest_leg_fill['amount_remaining'] <= 1e-9:
-                        pos_data['open_legs'].pop(0)
-
-                if not pos_data['open_legs']: # Short cycle closed
-                    # Compile cycle
-                    cycle_pnl = sum(t.get('pnl', 0.0) for t in pos_data['cycle_trades_details'])
-
-                    cycle_sells = [t for t in pos_data['cycle_trades_details'] if t['side'] == 'sell'] # Entry for short
-                    cycle_buys = [t for t in pos_data['cycle_trades_details'] if t['side'] == 'buy'] # Exit for short
-                    total_amount_sold = sum(t['amount'] for t in cycle_sells)
-                    total_sell_value = sum(t['value'] for t in cycle_sells)
-                    total_amount_bought = sum(t['amount'] for t in cycle_buys) # Should match total_amount_sold
-                    total_buy_value = sum(t['value'] for t in cycle_buys)
-
-                    completed_cycle = {
-                        'symbol': symbol,
-                        'token': symbol.split('/')[0] if '/' in symbol else symbol,
-                        'cycle_start': pos_data['cycle_start_ts'],
-                        'cycle_end': timestamp,
-                        'cycle_type': 'short',
-                        'sell_orders': len(cycle_sells), # Entry orders for short
-                        'buy_orders': len(cycle_buys), # Exit orders for short
-                        'total_orders': len(pos_data['cycle_trades_details']),
-                        'total_amount': total_amount_sold, # Amount that formed the basis of the short
-                        'avg_entry_price': total_sell_value / total_amount_sold if total_amount_sold > 0 else 0, # Avg sell price
-                        'avg_exit_price': total_buy_value / total_amount_bought if total_amount_bought > 0 else 0, # Avg buy price
-                        'total_pnl': cycle_pnl,
-                        'sell_value': total_sell_value, # Entry value for short
-                        'buy_value': total_buy_value, # Exit value for short
-                        'cycle_trades': pos_data['cycle_trades_details'].copy()
-                    }
-                    completed_cycles.append(completed_cycle)
-
-                    # Reset for next cycle, potentially flip if buy_amount_remaining > 0
-                    pos_data['cycle_trades_details'] = []
-                    pos_data['cycle_start_ts'] = None
-                    pos_data['current_leg_type'] = None
-                    if buy_amount_remaining > 1e-9: # Flipped to long
-                        pos_data['current_leg_type'] = 'long'
-                        pos_data['cycle_start_ts'] = timestamp
-                        pos_data['open_legs'].append({
-                            'side': 'buy', 'amount_remaining': buy_amount_remaining, 'price': price,
-                            'timestamp': timestamp, 'value': buy_amount_remaining * price,
-                            'pnl_contribution': trade_pnl
-                        })
-                        pos_data['cycle_trades_details'] = [current_trade_detail]
-
-        return completed_cycles
+        total_pnl_from_cycles = token_stats_summary['total_pnl_from_cycles'] if token_stats_summary and token_stats_summary['total_pnl_from_cycles'] is not None else 0.0
+        total_completed_cycles_sum = token_stats_summary['total_completed_cycles_sum'] if token_stats_summary and token_stats_summary['total_completed_cycles_sum'] is not None else 0
 
-    def get_basic_stats(self, current_balance: Optional[float] = None) -> Dict[str, Any]:
-        """Get basic trading statistics from DB."""
-        trades = self._fetch_query("SELECT COUNT(*) as count, side FROM trades GROUP BY side")
-        total_trades_count = sum(t['count'] for t in trades)
-        buy_trades_count = next((t['count'] for t in trades if t['side'] == 'buy'), 0)
-        sell_trades_count = next((t['count'] for t in trades if t['side'] == 'sell'), 0)
+        # Total trades considered as sum of completed cycles and currently open positions.
+        # This redefines 'total_trades' from its previous meaning of individual fills.
+        total_trades_redefined = total_completed_cycles_sum + open_positions_count
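+        # Illustrative arithmetic (assumed numbers): 12 completed cycles in
+        # token_stats plus 3 open rows in 'trades' gives total_trades_redefined == 15.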
 
         initial_balance_str = self._get_metadata('initial_balance')
         initial_balance = float(initial_balance_str) if initial_balance_str else 0.0
@@ -492,77 +375,81 @@ class TradingStats:
         start_date_iso = self._get_metadata('start_date')
         start_date_obj = datetime.fromisoformat(start_date_iso) if start_date_iso else datetime.now(timezone.utc)
         days_active = (datetime.now(timezone.utc) - start_date_obj).days + 1
-
-        completed_cycles = self.calculate_completed_trade_cycles() # This can be expensive
-        total_pnl_from_cycles = sum(cycle['total_pnl'] for cycle in completed_cycles)
 
-        last_trade_row = self._fetchone_query("SELECT timestamp FROM trades ORDER BY timestamp DESC LIMIT 1")
-        last_trade_ts = last_trade_row['timestamp'] if last_trade_row else None
+        # 'last_trade' timestamp could be the last update to token_stats or an open trade
+        last_activity_ts = token_stats_summary['overall_last_cycle_closed'] if token_stats_summary else None
+        last_open_trade_ts_row = self._fetchone_query("SELECT MAX(updated_at) as last_update FROM trades WHERE status = 'position_opened'")
+        if last_open_trade_ts_row and last_open_trade_ts_row['last_update']:
+            if not last_activity_ts or datetime.fromisoformat(last_open_trade_ts_row['last_update']) > datetime.fromisoformat(last_activity_ts):
+                last_activity_ts = last_open_trade_ts_row['last_update']
+
+        # Buy/sell trade counts from individual fills are no longer directly available for completed cycles.
+        # If needed, this requires a schema change in token_stats or a different approach.
+        # For now, these are omitted from basic_stats.
 
         return {
-            'total_trades': total_trades_count,
-            'completed_trades': len(completed_cycles),
-            'buy_trades': buy_trades_count,
-            'sell_trades': sell_trades_count,
+            'total_trades': total_trades_redefined, # This is now cycles + open positions
+            'completed_trades': total_completed_cycles_sum, # This is the sum of total_completed_cycles from token_stats
+            # 'buy_trades': buy_trades_count, # Omitted
+            # 'sell_trades': sell_trades_count, # Omitted
             'initial_balance': initial_balance,
-            'total_pnl': total_pnl_from_cycles, # PNL from closed cycles
+            'total_pnl': total_pnl_from_cycles, # PNL from closed cycles via token_stats
             'days_active': days_active,
             'start_date': start_date_obj.strftime('%Y-%m-%d'),
-            'last_trade': last_trade_ts
+            'last_trade': last_activity_ts, # Reflects last known activity (cycle close or open trade update)
+            'open_positions_count': open_positions_count
         }
 
     def get_performance_stats(self) -> Dict[str, Any]:
-        """Calculate advanced performance statistics using completed cycles."""
-        completed_cycles = self.calculate_completed_trade_cycles()
-
-        if not completed_cycles:
+        """Calculate advanced performance statistics using aggregated data from token_stats."""
+        query = """
+            SELECT
+                SUM(total_completed_cycles) as total_cycles,
+                SUM(winning_cycles) as total_wins,
+                SUM(losing_cycles) as total_losses,
+                SUM(sum_of_winning_pnl) as total_winning_pnl,
+                SUM(sum_of_losing_pnl) as total_losing_pnl, -- Stored positive
+                MAX(largest_winning_cycle_pnl) as overall_largest_win,
+                MAX(largest_losing_cycle_pnl) as overall_largest_loss -- Stored positive
+            FROM token_stats
+        """
+        summary = self._fetchone_query(query)
+
+        if not summary or summary['total_cycles'] is None or summary['total_cycles'] == 0:
             return {
                 'win_rate': 0.0, 'profit_factor': 0.0, 'avg_win': 0.0, 'avg_loss': 0.0,
-                'largest_win': 0.0, 'largest_loss': 0.0, 'consecutive_wins': 0,
-                'consecutive_losses': 0, 'total_wins': 0, 'total_losses': 0, 'expectancy': 0.0
+                'largest_win': 0.0, 'largest_loss': 0.0,
+                # 'consecutive_wins': 0, # Removed
+                # 'consecutive_losses': 0, # Removed
                'total_wins': 0, 'total_losses': 0, 'expectancy': 0.0
            }
 
-        wins_pnl = [c['total_pnl'] for c in completed_cycles if c['total_pnl'] > 0]
-        losses_pnl = [abs(c['total_pnl']) for c in completed_cycles if c['total_pnl'] < 0] # Absolute values for losses
-
-        total_wins_count = len(wins_pnl)
-        total_losses_count = len(losses_pnl)
-        total_completed_count = total_wins_count + total_losses_count
-
+        total_completed_count = summary['total_cycles']
+        total_wins_count = summary['total_wins'] if summary['total_wins'] is not None else 0
+        total_losses_count = summary['total_losses'] if summary['total_losses'] is not None else 0
+
        win_rate = (total_wins_count / total_completed_count * 100) if total_completed_count > 0 else 0.0
 
-        sum_of_wins = sum(wins_pnl)
-        sum_of_losses = sum(losses_pnl) # Sum of absolute losses
+        sum_of_wins = summary['total_winning_pnl'] if summary['total_winning_pnl'] is not None else 0.0
+        sum_of_losses = summary['total_losing_pnl'] if summary['total_losing_pnl'] is not None else 0.0 # This is the sum of absolute losses
 
        profit_factor = (sum_of_wins / sum_of_losses) if sum_of_losses > 0 else float('inf') if sum_of_wins > 0 else 0.0
 
-        avg_win = np.mean(wins_pnl) if wins_pnl else 0.0
-        avg_loss = np.mean(losses_pnl) if losses_pnl else 0.0 # Avg of absolute losses
+        avg_win = (sum_of_wins / total_wins_count) if total_wins_count > 0 else 0.0
+        avg_loss = (sum_of_losses / total_losses_count) if total_losses_count > 0 else 0.0 # Avg of absolute losses
 
-        largest_win = max(wins_pnl) if wins_pnl else 0.0
-        largest_loss = max(losses_pnl) if losses_pnl else 0.0 # Largest absolute loss
-
-        # Consecutive wins/losses
-        consecutive_wins = 0
-        consecutive_losses = 0
-        current_wins = 0
-        current_losses = 0
-        for cycle in completed_cycles:
-            if cycle['total_pnl'] > 0:
-                current_wins += 1
-                current_losses = 0
-            else: # Assumes PNL is non-zero for a loss, or it's a scratch trade
-                current_losses += 1
-                current_wins = 0
-            consecutive_wins = max(consecutive_wins, current_wins)
-            consecutive_losses = max(consecutive_losses, current_losses)
+        largest_win = summary['overall_largest_win'] if summary['overall_largest_win'] is not None else 0.0
+        largest_loss = summary['overall_largest_loss'] if summary['overall_largest_loss'] is not None else 0.0 # Largest absolute loss
+
+        # Consecutive wins/losses removed, as they are hard to track with this aggregation model.
 
        expectancy = (avg_win * (win_rate / 100)) - (avg_loss * (1 - (win_rate / 100)))
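+        # Worked example with assumed numbers: win_rate = 60.0, avg_win = 30.0 and
+        # avg_loss = 20.0 gives expectancy = (30.0 * 0.6) - (20.0 * 0.4) = 10.0
+        # per completed cycle.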
 
         return {
             'win_rate': win_rate, 'profit_factor': profit_factor, 'avg_win': avg_win, 'avg_loss': avg_loss,
             'largest_win': largest_win, 'largest_loss': largest_loss,
-            'consecutive_wins': consecutive_wins, 'consecutive_losses': consecutive_losses,
+            # 'consecutive_wins': consecutive_wins, # Removed
+            # 'consecutive_losses': consecutive_losses, # Removed
             'total_wins': total_wins_count, 'total_losses': total_losses_count, 'expectancy': expectancy
         }
@@ -670,11 +557,11 @@ class TradingStats:
             stats_text_parts.append(f"• Initial Balance: {formatter.format_price_with_symbol(initial_bal)}")
             stats_text_parts.append(f"• {pnl_emoji} Total P&L: {formatter.format_price_with_symbol(total_pnl_val)} ({total_return_pct:+.2f}%)\n")
             stats_text_parts.append(f"\n📈 <b>Trading Activity:</b>")
-            stats_text_parts.append(f"• Total Orders: {basic['total_trades']}")
+            stats_text_parts.append(f"• Total Orders (Cycles + Open): {basic['total_trades']}") # Clarified meaning
             stats_text_parts.append(f"• Completed Trades (Cycles): {basic['completed_trades']}")
-            stats_text_parts.append(f"• Open Positions: {open_positions_count}")
+            stats_text_parts.append(f"• Open Positions: {open_positions_count}") # Using dedicated count
             stats_text_parts.append(f"• Days Active: {basic['days_active']}\n")
-            stats_text_parts.append(f"\n🏆 <b>Performance Metrics:</b>")
+            stats_text_parts.append(f"\n🏆 <b>Performance Metrics (Completed Cycles):</b>") # Clarified scope
             stats_text_parts.append(f"• Win Rate: {perf['win_rate']:.1f}% ({perf['total_wins']}W/{perf['total_losses']}L)")
             stats_text_parts.append(f"• Profit Factor: {perf['profit_factor']:.2f}")
             stats_text_parts.append(f"• Avg Win: {formatter.format_price_with_symbol(perf['avg_win'])} | Avg Loss: {formatter.format_price_with_symbol(perf['avg_loss'])}")
@@ -687,10 +574,28 @@ class TradingStats:
             stats_text_parts.append(f"• Withdrawals: {formatter.format_price_with_symbol(adjustments_summary['total_withdrawals'])}")
             stats_text_parts.append(f"• {adj_emoji} Net: {formatter.format_price_with_symbol(adjustments_summary['net_adjustment'])} ({adjustments_summary['adjustment_count']} transactions)")
 
-            stats_text_parts.append(f"\n\n🎯 <b>Trade Distribution:</b>")
-            stats_text_parts.append(f"• Buy Orders: {basic['buy_trades']} | Sell Orders: {basic['sell_trades']}")
-            stats_text_parts.append(f"• Volume Traded (Sells): {formatter.format_price_with_symbol(total_sell_volume)}")
-            stats_text_parts.append(f"• Avg Sell Trade Size: {formatter.format_price_with_symbol(avg_trade_size_sell)}\n")
+            stats_text_parts.append(f"\n\n🎯 <b>Trade Distribution (Completed Cycles):</b>") # Renamed & Clarified
+            # The old buy_trades/sell_trades counts are no longer directly available from basic_stats.
+            # We can show total completed cycle volume from token_stats if desired.
+            # For now, removing the detailed buy/sell order counts and specific sell volume.
+            # Consider adding total volume from token_stats later if needed.
+            # stats_text_parts.append(f"• Buy Orders: {basic['buy_trades']} | Sell Orders: {basic['sell_trades']}")
+            # stats_text_parts.append(f"• Volume Traded (Sells): {formatter.format_price_with_symbol(total_sell_volume)}")
+            # stats_text_parts.append(f"• Avg Sell Trade Size: {formatter.format_price_with_symbol(avg_trade_size_sell)}\n")
+
+            # Let's add total completed volume as a more relevant stat now
+            overall_token_stats_summary = self._fetchone_query(
+                "SELECT SUM(total_entry_volume) as total_entry, SUM(total_exit_volume) as total_exit FROM token_stats"
+            )
+            total_entry_vol_all_cycles = 0.0
+            total_exit_vol_all_cycles = 0.0
+            if overall_token_stats_summary:
+                total_entry_vol_all_cycles = overall_token_stats_summary.get('total_entry', 0.0) or 0.0
+                total_exit_vol_all_cycles = overall_token_stats_summary.get('total_exit', 0.0) or 0.0
+
+            stats_text_parts.append(f"• Total Entry Volume (Cycles): {formatter.format_price_with_symbol(total_entry_vol_all_cycles)}")
+            stats_text_parts.append(f"• Total Exit Volume (Cycles): {formatter.format_price_with_symbol(total_exit_vol_all_cycles)}\n")
+
             stats_text_parts.append(f"\n⏰ <b>Session Info:</b>")
             stats_text_parts.append(f"• Started: {basic['start_date']}")
             stats_text_parts.append(f"• Last Update: {datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC')}")
@@ -702,173 +607,164 @@ class TradingStats:
             return f"""📊 <b>Trading Statistics</b>\n\n❌ <b>Error loading statistics</b>\n\n🔧 <b>Debug info:</b> {str(e)[:100]}"""
 
     def get_recent_trades(self, limit: int = 10) -> List[Dict[str, Any]]:
-        """Get recent trades from DB."""
-        return self._fetch_query("SELECT * FROM trades ORDER BY timestamp DESC LIMIT ?", (limit,))
+        """Get recent trades from DB (these are active/open trades, as completed ones are migrated)."""
+        return self._fetch_query("SELECT * FROM trades WHERE status = 'position_opened' ORDER BY updated_at DESC LIMIT ?", (limit,))
 
     def get_token_performance(self) -> Dict[str, Dict[str, Any]]:
-        """Get performance statistics grouped by token using completed cycles."""
-        completed_cycles = self.calculate_completed_trade_cycles()
-        token_performance = {}
-
-        # Group cycles by token (symbol's base part)
-        token_cycles_map = defaultdict(list)
-        for cycle in completed_cycles:
-            token_cycles_map[cycle['token']].append(cycle)
+        """Get performance statistics grouped by token using the token_stats table."""
+        all_token_stats = self._fetch_query("SELECT * FROM token_stats ORDER BY token ASC")
 
-        for token, cycles_for_token in token_cycles_map.items():
-            if not cycles_for_token: continue
-
-            wins_pnl = [c['total_pnl'] for c in cycles_for_token if c['total_pnl'] > 0]
-            losses_pnl = [abs(c['total_pnl']) for c in cycles_for_token if c['total_pnl'] < 0]
-
-            total_pnl = sum(c['total_pnl'] for c in cycles_for_token)
-            total_volume_sold = sum(c['sell_value'] for c in cycles_for_token) # Based on sell value in cycle
+        token_performance_map = {}
+        for record in all_token_stats:
+            token = record['token']
+            total_pnl = record.get('total_realized_pnl', 0.0)
+            # total_volume_sold now refers to total_exit_volume from token_stats
+            total_volume = record.get('total_exit_volume', 0.0)
 
-            pnl_percentage = (total_pnl / total_volume_sold * 100) if total_volume_sold > 0 else 0.0
+            pnl_percentage = (total_pnl / total_volume * 100) if total_volume > 0 else 0.0
 
-            total_wins_count = len(wins_pnl)
-            total_losses_count = len(losses_pnl)
-            total_completed_count = total_wins_count + total_losses_count
+            total_completed_count = record.get('total_completed_cycles', 0)
+            total_wins_count = record.get('winning_cycles', 0)
+            total_losses_count = record.get('losing_cycles', 0)
 
            win_rate = (total_wins_count / total_completed_count * 100) if total_completed_count > 0 else 0.0
 
-            sum_of_wins = sum(wins_pnl)
-            sum_of_losses = sum(losses_pnl)
+            sum_of_wins = record.get('sum_of_winning_pnl', 0.0)
+            sum_of_losses = record.get('sum_of_losing_pnl', 0.0) # Stored positive
            profit_factor = (sum_of_wins / sum_of_losses) if sum_of_losses > 0 else float('inf') if sum_of_wins > 0 else 0.0
 
-            avg_win = np.mean(wins_pnl) if wins_pnl else 0.0
-            avg_loss = np.mean(losses_pnl) if losses_pnl else 0.0
+            avg_win = (sum_of_wins / total_wins_count) if total_wins_count > 0 else 0.0
+            avg_loss = (sum_of_losses / total_losses_count) if total_losses_count > 0 else 0.0
            expectancy = (avg_win * (win_rate / 100)) - (avg_loss * (1 - (win_rate / 100)))
 
-            largest_win = max(wins_pnl) if wins_pnl else 0.0
-            largest_loss = max(losses_pnl) if losses_pnl else 0.0
-
-            token_performance[token] = {
-                'total_pnl': total_pnl, 'pnl_percentage': pnl_percentage,
-                'completed_trades': total_completed_count, 'total_volume': total_volume_sold,
-                'win_rate': win_rate, 'total_wins': total_wins_count, 'total_losses': total_losses_count,
-                'profit_factor': profit_factor, 'expectancy': expectancy,
-                'largest_win': largest_win, 'largest_loss': largest_loss,
-                'avg_win': avg_win, 'avg_loss': avg_loss
-                # 'cycles': cycles_for_token # Optionally include raw cycle data
+            largest_win = record.get('largest_winning_cycle_pnl', 0.0)
+            largest_loss = record.get('largest_losing_cycle_pnl', 0.0) # Stored positive
+
+            token_performance_map[token] = {
+                'token': token, # Added for easier access if iterating over values
+                'total_pnl': total_pnl,
+                'pnl_percentage': pnl_percentage,
+                'completed_trades': total_completed_count,
+                'total_volume': total_volume, # This is total_exit_volume
+                'win_rate': win_rate,
+                'total_wins': total_wins_count,
+                'total_losses': total_losses_count,
+                'profit_factor': profit_factor,
+                'expectancy': expectancy,
+                'largest_win': largest_win,
+                'largest_loss': largest_loss,
+                'avg_win': avg_win,
+                'avg_loss': avg_loss,
+                'first_cycle_closed_at': record.get('first_cycle_closed_at'),
+                'last_cycle_closed_at': record.get('last_cycle_closed_at'),
+                'total_cancelled_cycles': record.get('total_cancelled_cycles', 0)
            }
-        return token_performance
+        return token_performance_map
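+    # Minimal usage sketch (illustrative; `stats` is an assumed TradingStats instance):
+    #   perf = stats.get_token_performance()
+    #   top3 = sorted(perf.values(), key=lambda p: p['total_pnl'], reverse=True)[:3]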
 
     def get_token_detailed_stats(self, token: str) -> Dict[str, Any]:
-        """Get detailed statistics for a specific token using DB queries and cycle calculation."""
+        """Get detailed statistics for a specific token using token_stats and current open trades."""
         upper_token = _normalize_token_case(token)
 
-        # Get all trades for this specific token (symbol starts with token + '/')
-        # This is simpler than trying to filter cycles by token string directly in SQL for complex symbols
-        all_trades_for_token_symbol_prefix = self._fetch_query(
-            "SELECT * FROM trades WHERE symbol LIKE ? ORDER BY timestamp ASC", (f"{upper_token}/%",)
+        # Get aggregated performance from token_stats
+        token_agg_stats = self._fetchone_query("SELECT * FROM token_stats WHERE token = ?", (upper_token,))
+
+        # Get currently open trades for this token from the 'trades' table (not yet migrated)
+        # These are not completed cycles but represent current exposure.
+        open_trades_for_token = self._fetch_query(
+            "SELECT * FROM trades WHERE symbol LIKE ? AND status = 'position_opened' ORDER BY timestamp ASC",
+            (f"{upper_token}/%",)
        )
 
-        if not all_trades_for_token_symbol_prefix:
+        if not token_agg_stats and not open_trades_for_token:
            return {
                'token': upper_token, 'total_trades': 0, 'total_pnl': 0.0, 'win_rate': 0.0,
-                'message': f"No trading history found for {upper_token}"
+                'message': f"No trading history or open positions found for {upper_token}"
            }
 
-        # Calculate completed cycles specifically for these trades
-        # To correctly calculate cycles for *only* this token, we need to run the FIFO logic
-        # on trades filtered for this token.
-        # The global `calculate_completed_trade_cycles` uses *all* trades.
-        all_completed_cycles = self.calculate_completed_trade_cycles()
-        token_cycles = [c for c in all_completed_cycles if _normalize_token_case(c['token']) == upper_token]
-
-        total_individual_orders = len(all_trades_for_token_symbol_prefix)
-        buy_orders = len([t for t in all_trades_for_token_symbol_prefix if t['side'] == 'buy'])
-        sell_orders = len([t for t in all_trades_for_token_symbol_prefix if t['side'] == 'sell'])
-        total_volume_all_orders = sum(t['value'] for t in all_trades_for_token_symbol_prefix)
-
-        if not token_cycles:
-            return {
-                'token': upper_token, 'total_trades': total_individual_orders, 'buy_trades': buy_orders,
-                'sell_trades': sell_orders, 'total_volume': total_volume_all_orders,
+        # Initialize with empty performance if no aggregated data
+        perf_stats = {}
+        if token_agg_stats:
+            perf_stats = {
+                'completed_trades': token_agg_stats.get('total_completed_cycles', 0),
+                'total_pnl': token_agg_stats.get('total_realized_pnl', 0.0),
+                'pnl_percentage': 0.0, # Recalculated below when exit volume is available
+                'win_rate': 0.0,
+                'profit_factor': 0.0, # Calculated below from the stored PNL sums (token_stats has no profit_factor column)
+                'avg_win': 0.0,
+                'avg_loss': 0.0,
+                'largest_win': token_agg_stats.get('largest_winning_cycle_pnl', 0.0),
+                'largest_loss': token_agg_stats.get('largest_losing_cycle_pnl', 0.0),
+                'expectancy': 0.0,
+                'total_wins': token_agg_stats.get('winning_cycles', 0),
+                'total_losses': token_agg_stats.get('losing_cycles', 0),
+                'completed_entry_volume': token_agg_stats.get('total_entry_volume', 0.0),
+                'completed_exit_volume': token_agg_stats.get('total_exit_volume', 0.0),
+                'total_cancelled': token_agg_stats.get('total_cancelled_cycles', 0)
+            }
+            if perf_stats['completed_trades'] > 0:
+                perf_stats['win_rate'] = perf_stats['total_wins'] / perf_stats['completed_trades'] * 100
+                sum_wins = token_agg_stats.get('sum_of_winning_pnl', 0.0)
+                sum_losses = token_agg_stats.get('sum_of_losing_pnl', 0.0)
+                perf_stats['profit_factor'] = (sum_wins / sum_losses) if sum_losses > 0 else float('inf') if sum_wins > 0 else 0.0
+                perf_stats['avg_win'] = (sum_wins / perf_stats['total_wins']) if perf_stats['total_wins'] > 0 else 0.0
+                perf_stats['avg_loss'] = (sum_losses / perf_stats['total_losses']) if perf_stats['total_losses'] > 0 else 0.0
+                perf_stats['expectancy'] = (perf_stats['avg_win'] * (perf_stats['win_rate'] / 100)) - (perf_stats['avg_loss'] * (1 - (perf_stats['win_rate'] / 100)))
+                if perf_stats['completed_exit_volume'] > 0:
+                    perf_stats['pnl_percentage'] = (perf_stats['total_pnl'] / perf_stats['completed_exit_volume'] * 100)
+        else: # No completed cycles for this token yet
+            perf_stats = {
                'completed_trades': 0, 'total_pnl': 0.0, 'pnl_percentage': 0.0, 'win_rate': 0.0,
-                'message': f"{upper_token} has open positions or trades but no completed trade cycles yet."
+                'profit_factor': 0.0, 'avg_win': 0.0, 'avg_loss': 0.0, 'largest_win': 0.0, 'largest_loss': 0.0,
+                'expectancy': 0.0, 'total_wins': 0, 'total_losses': 0,
+                'completed_entry_volume': 0.0, 'completed_exit_volume': 0.0, 'total_cancelled': 0
            }
 
-        # Performance based on this token's completed cycles
-        perf_stats = self.get_token_performance().get(upper_token, {}) # Re-use general calculation logic
+        # Info about open positions for this token (raw trades, not cycles)
+        open_positions_summary = []
+        total_open_value = 0.0
+        total_open_unrealized_pnl = 0.0
+        for op_trade in open_trades_for_token:
+            open_positions_summary.append({
+                'lifecycle_id': op_trade.get('trade_lifecycle_id'),
+                'side': op_trade.get('position_side'),
+                'amount': op_trade.get('current_position_size'),
+                'entry_price': op_trade.get('entry_price'),
+                'mark_price': op_trade.get('mark_price'),
+                'unrealized_pnl': op_trade.get('unrealized_pnl'),
+                'opened_at': op_trade.get('position_opened_at')
+            })
+            total_open_value += op_trade.get('value') or 0.0 # Initial value of open positions; 'or' guards a stored NULL
+            total_open_unrealized_pnl += op_trade.get('unrealized_pnl') or 0.0
+
+        # Raw individual orders from the 'orders' table for this token are complex to summarize here.
+        # The old version counted 'buy_orders' and 'sell_orders' from all trades for the token;
+        # that is no longer straightforward for completed cycles.
+        # We can, however, count open orders for this token.
+        open_orders_count_row = self._fetchone_query(
+            "SELECT COUNT(*) as count FROM orders WHERE symbol LIKE ? AND status IN ('open', 'submitted', 'pending_trigger')",
+            (f"{upper_token}/%",)
+        )
+        current_open_orders_for_token = open_orders_count_row['count'] if open_orders_count_row else 0
 
-        # Filter for recent closed trades
-        recent_closed_trades = [t for t in all_trades_for_token_symbol_prefix if t.get('status') == 'position_closed']
+        # 'total_trades' here could mean total orders ever placed for this token, or completed cycles + open positions.
+        # Define it as completed cycles + number of currently open positions, for consistency with get_basic_stats.
+        effective_total_trades = perf_stats['completed_trades'] + len(open_trades_for_token)
 
        return {
            'token': upper_token,
-            'total_trades': total_individual_orders,
-            'completed_trades': perf_stats.get('completed_trades', 0),
-            'buy_trades': buy_orders,
-            'sell_trades': sell_orders,
-            'total_volume': total_volume_all_orders, # Volume of all orders for this token
-            'completed_volume': perf_stats.get('total_volume', 0.0), # Volume from completed cycles
-            'total_pnl': perf_stats.get('total_pnl', 0.0),
-            'pnl_percentage': perf_stats.get('pnl_percentage', 0.0),
-            'win_rate': perf_stats.get('win_rate', 0.0),
-            'profit_factor': perf_stats.get('profit_factor', 0.0),
-            'avg_win': perf_stats.get('avg_win', 0.0),
-            'avg_loss': perf_stats.get('avg_loss', 0.0),
-            'largest_win': perf_stats.get('largest_win', 0.0),
-            'largest_loss': perf_stats.get('largest_loss', 0.0),
-            'expectancy': perf_stats.get('expectancy', 0.0),
-            'total_wins': perf_stats.get('total_wins',0),
-            'total_losses': perf_stats.get('total_losses',0),
-            'recent_trades': recent_closed_trades[-5:], # Last 5 CLOSET trades for this token
-            'cycles': token_cycles # Optionally include raw cycle data
+            'message': f"Statistics for {upper_token}",
+            'performance_summary': perf_stats, # From token_stats table
+            'open_positions': open_positions_summary, # List of currently open positions
+            'open_positions_count': len(open_trades_for_token),
+            'current_open_orders_count': current_open_orders_for_token,
+            'summary_total_trades': effective_total_trades, # Completed cycles + open positions
+            'summary_total_realized_pnl': perf_stats['total_pnl'],
+            'summary_total_unrealized_pnl': total_open_unrealized_pnl,
+            # 'cycles': token_cycles # Raw cycle data for completed trades is no longer stored directly after migration
        }
 
-    def _get_aggregated_period_stats_from_cycles(self) -> Dict[str, Dict[str, Dict[str, Any]]]:
-        """Helper to aggregate completed cycles by day, week, month for P&L and volume."""
-        completed_cycles = self.calculate_completed_trade_cycles()
-
-        daily_aggr = defaultdict(lambda: {'trades': 0, 'pnl': 0.0, 'volume': 0.0})
-        weekly_aggr = defaultdict(lambda: {'trades': 0, 'pnl': 0.0, 'volume': 0.0})
-        monthly_aggr = defaultdict(lambda: {'trades': 0, 'pnl': 0.0, 'volume': 0.0})
-
-        for cycle in completed_cycles:
-            try:
-                # Use cycle_end timestamp (string) and parse it
-                end_dt = datetime.fromisoformat(cycle['cycle_end'])
-                if end_dt.tzinfo is None: # Ensure timezone aware for proper calculations
-                    end_dt = end_dt.replace(tzinfo=timezone.utc)
-                else:
-                    end_dt = end_dt.astimezone(timezone.utc)
-
-                pnl = cycle['total_pnl']
-                volume = cycle['sell_value'] # Volume based on sell value of the cycle
-
-                # Daily
-                day_key = end_dt.strftime('%Y-%m-%d')
-                daily_aggr[day_key]['trades'] += 1
-                daily_aggr[day_key]['pnl'] += pnl
-                daily_aggr[day_key]['volume'] += volume
-
-                # Weekly (YYYY-Www, where ww is week number 00-53, Monday as first day)
-                week_key = end_dt.strftime('%Y-W%W') # %W for Monday as first day
-                weekly_aggr[week_key]['trades'] += 1
-                weekly_aggr[week_key]['pnl'] += pnl
-                weekly_aggr[week_key]['volume'] += volume
-
-                # Monthly
-                month_key = end_dt.strftime('%Y-%m')
-                monthly_aggr[month_key]['trades'] += 1
-                monthly_aggr[month_key]['pnl'] += pnl
-                monthly_aggr[month_key]['volume'] += volume
-            except Exception as e:
-                logger.warning(f"Could not parse cycle_end '{cycle.get('cycle_end')}' for periodic stats: {e}")
-                continue
-
-        for aggr_dict in [daily_aggr, weekly_aggr, monthly_aggr]:
-            for stats in aggr_dict.values():
-                stats['pnl_pct'] = (stats['pnl'] / stats['volume'] * 100) if stats['volume'] > 0 else 0.0
-
-        return {'daily': dict(daily_aggr), 'weekly': dict(weekly_aggr), 'monthly': dict(monthly_aggr)}
-
     def get_daily_stats(self, limit: int = 10) -> List[Dict[str, Any]]:
-        """Get daily performance stats for the last N days."""
-        period_aggregates = self._get_aggregated_period_stats_from_cycles()['daily']
+        """Get daily performance stats for the last N days from daily_aggregated_stats."""
        daily_stats_list = []
        today_utc = datetime.now(timezone.utc).date()
@@ -877,8 +773,25 @@ class TradingStats:
        date_str = target_date.strftime('%Y-%m-%d')
        date_formatted = target_date.strftime('%m/%d') # For display
 
-        stats_for_day = period_aggregates.get(date_str)
-        if stats_for_day:
+        # Query for all tokens for that day and sum them up.
+        # Or, if daily_aggregated_stats stores an _OVERALL_ record, query that.
+        # Assuming for now we sum up all token records for a given day.
+        day_aggregated_data = self._fetch_query(
+            "SELECT SUM(realized_pnl) as pnl, SUM(completed_cycles) as trades, SUM(exit_volume) as volume FROM daily_aggregated_stats WHERE date = ?",
+            (date_str,)
+        )
+
+        stats_for_day = None
+        if day_aggregated_data and len(day_aggregated_data) > 0 and day_aggregated_data[0]['trades'] is not None:
+            stats_for_day = day_aggregated_data[0]
+            # Calculate pnl_pct if volume is present and positive
+            pnl = stats_for_day.get('pnl', 0.0) or 0.0
+            volume = stats_for_day.get('volume', 0.0) or 0.0
+            stats_for_day['pnl_pct'] = (pnl / volume * 100) if volume > 0 else 0.0
+            # Ensure trades is an int
+            stats_for_day['trades'] = int(stats_for_day.get('trades', 0) or 0)
+
+        if stats_for_day and stats_for_day['trades'] > 0:
            daily_stats_list.append({
                'date': date_str, 'date_formatted': date_formatted, 'has_trades': True,
                **stats_for_day
@@ -891,59 +804,100 @@ class TradingStats:
        return daily_stats_list
 
    def get_weekly_stats(self, limit: int = 10) -> List[Dict[str, Any]]:
-        """Get weekly performance stats for the last N weeks."""
-        period_aggregates = self._get_aggregated_period_stats_from_cycles()['weekly']
+        """Get weekly performance stats for the last N weeks by aggregating daily_aggregated_stats."""
        weekly_stats_list = []
        today_utc = datetime.now(timezone.utc).date()
 
        for i in range(limit):
-            # Target week starts on Monday 'i' weeks ago
            target_monday = today_utc - timedelta(days=today_utc.weekday() + (i * 7))
            target_sunday = target_monday + timedelta(days=6)
 
-            week_key = target_monday.strftime('%Y-W%W') # %W for Monday as first day
-            week_formatted = f"{target_monday.strftime('%m/%d')}-{target_sunday.strftime('%m/%d')}"
+            week_key_display = f"{target_monday.strftime('%Y-W%W')}" # For internal key if needed
+            week_formatted_display = f"{target_monday.strftime('%m/%d')}-{target_sunday.strftime('%m/%d/%y')}"
 
-            stats_for_week = period_aggregates.get(week_key)
-            if stats_for_week:
-                weekly_stats_list.append({
-                    'week': week_key, 'week_formatted': week_formatted, 'has_trades': True,
-                    **stats_for_week
-                })
+            # Fetch daily records for this week range
+            daily_records_for_week = self._fetch_query(
+                "SELECT date, realized_pnl, completed_cycles, exit_volume FROM daily_aggregated_stats WHERE date BETWEEN ? AND ?",
+                (target_monday.strftime('%Y-%m-%d'), target_sunday.strftime('%Y-%m-%d'))
+            )
+
+            if daily_records_for_week:
+                total_pnl_week = sum(d.get('realized_pnl', 0.0) or 0.0 for d in daily_records_for_week)
+                total_trades_week = sum(d.get('completed_cycles', 0) or 0 for d in daily_records_for_week)
+                total_volume_week = sum(d.get('exit_volume', 0.0) or 0.0 for d in daily_records_for_week)
+                pnl_pct_week = (total_pnl_week / total_volume_week * 100) if total_volume_week > 0 else 0.0
+
+                if total_trades_week > 0:
+                    weekly_stats_list.append({
+                        'week': week_key_display,
+                        'week_formatted': week_formatted_display,
+                        'has_trades': True,
+                        'pnl': total_pnl_week,
+                        'trades': total_trades_week,
+                        'volume': total_volume_week,
+                        'pnl_pct': pnl_pct_week
+                    })
+                else:
+                    weekly_stats_list.append({
+                        'week': week_key_display, 'week_formatted': week_formatted_display, 'has_trades': False,
+                        'trades': 0, 'pnl': 0.0, 'volume': 0.0, 'pnl_pct': 0.0
+                    })
            else:
                weekly_stats_list.append({
-                    'week': week_key, 'week_formatted': week_formatted, 'has_trades': False,
+                    'week': week_key_display, 'week_formatted': week_formatted_display, 'has_trades': False,
                    'trades': 0, 'pnl': 0.0, 'volume': 0.0, 'pnl_pct': 0.0
                })
        return weekly_stats_list
 
    def get_monthly_stats(self, limit: int = 10) -> List[Dict[str, Any]]:
-        """Get monthly performance stats for the last N months."""
-        period_aggregates = self._get_aggregated_period_stats_from_cycles()['monthly']
+        """Get monthly performance stats for the last N months by aggregating daily_aggregated_stats."""
        monthly_stats_list = []
        current_month_start_utc = datetime.now(timezone.utc).date().replace(day=1)
 
        for i in range(limit):
-            # Calculate target month by subtracting months
            year = current_month_start_utc.year
            month = current_month_start_utc.month - i
-            while month <= 0: # Adjust year if month goes to 0 or negative
+            while month <= 0:
                month += 12
                year -= 1
 
-            target_month_date = datetime(year, month, 1, tzinfo=timezone.utc).date()
-            month_key = target_month_date.strftime('%Y-%m')
-            month_formatted = target_month_date.strftime('%b %Y')
+            target_month_start_date = datetime(year, month, 1, tzinfo=timezone.utc).date()
+            # Find end of target month
+            next_month_start_date = datetime(year + (month // 12), (month % 12) + 1, 1, tzinfo=timezone.utc).date() if month < 12 else datetime(year + 1, 1, 1, tzinfo=timezone.utc).date()
+            target_month_end_date = next_month_start_date - timedelta(days=1)
+
+            month_key_display = target_month_start_date.strftime('%Y-%m')
+            month_formatted_display = target_month_start_date.strftime('%b %Y')
 
-            stats_for_month = period_aggregates.get(month_key)
-            if stats_for_month:
-                monthly_stats_list.append({
-                    'month': month_key, 'month_formatted': month_formatted, 'has_trades': True,
-                    **stats_for_month
-                })
+            daily_records_for_month = self._fetch_query(
+                "SELECT date, realized_pnl, completed_cycles, exit_volume FROM daily_aggregated_stats WHERE date BETWEEN ? AND ?",
+                (target_month_start_date.strftime('%Y-%m-%d'), target_month_end_date.strftime('%Y-%m-%d'))
+            )
+
+            if daily_records_for_month:
+                total_pnl_month = sum(d.get('realized_pnl', 0.0) or 0.0 for d in daily_records_for_month)
+                total_trades_month = sum(d.get('completed_cycles', 0) or 0 for d in daily_records_for_month)
+                total_volume_month = sum(d.get('exit_volume', 0.0) or 0.0 for d in daily_records_for_month)
+                pnl_pct_month = (total_pnl_month / total_volume_month * 100) if total_volume_month > 0 else 0.0
+
+                if total_trades_month > 0:
+                    monthly_stats_list.append({
+                        'month': month_key_display,
+                        'month_formatted': month_formatted_display,
+                        'has_trades': True,
+                        'pnl': total_pnl_month,
+                        'trades': total_trades_month,
+                        'volume': total_volume_month,
+                        'pnl_pct': pnl_pct_month
+                    })
+                else:
+                    monthly_stats_list.append({
+                        'month': month_key_display, 'month_formatted': month_formatted_display, 'has_trades': False,
+                        'trades': 0, 'pnl': 0.0, 'volume': 0.0, 'pnl_pct': 0.0
+                    })
            else:
-                monthly_stats_list.append({
-                    'month': month_key, 'month_formatted': month_formatted, 'has_trades': False,
+                monthly_stats_list.append({
+                    'month': month_key_display, 'month_formatted': month_formatted_display, 'has_trades': False,
                    'trades': 0, 'pnl': 0.0, 'volume': 0.0, 'pnl_pct': 0.0
                })
        return monthly_stats_list
@@ -1661,3 +1615,139 @@ class TradingStats:
         """Get the total number of daily balance records."""
         row = self._fetchone_query("SELECT COUNT(*) as count FROM daily_balances")
         return row['count'] if row and 'count' in row else 0
+
+    # 🆕 PHASE 5: AGGREGATION AND PURGING LOGIC
+    def _migrate_trade_to_aggregated_stats(self, trade_lifecycle_id: str):
+        """Migrate a completed/cancelled trade's stats to aggregate tables and delete the original trade."""
+        trade_data = self.get_trade_by_lifecycle_id(trade_lifecycle_id)
+        if not trade_data:
+            logger.error(f"Cannot migrate trade {trade_lifecycle_id}: Not found.")
+            return
+
+        status = trade_data.get('status')
+        symbol = trade_data.get('symbol')
+        token = symbol.split('/')[0] if symbol and '/' in symbol else symbol # Assuming symbol like BTC/USDT
+        if not token:
+            logger.error(f"Cannot migrate trade {trade_lifecycle_id}: Token could not be derived from symbol '{symbol}'.")
+            return
+
+        now_iso = datetime.now(timezone.utc).isoformat()
+
+        try:
+            with self.conn: # Ensures atomicity for the operations below
+                if status == 'position_closed':
+                    realized_pnl = trade_data.get('realized_pnl', 0.0)
+                    # Use entry value if available, otherwise value (amount * price at entry)
+                    entry_value = trade_data.get('value', 0.0) # 'value' is amount * price from the initial trade record
+                    # For exit_value, we'd ideally have the value of the closing trade(s).
+                    # Given 'realized_pnl' from the trade record and 'entry_value' as the entry, exit_value = entry_value + realized_pnl.
+                    exit_value = entry_value + realized_pnl
+                    closed_at_str = trade_data.get('position_closed_at') or now_iso # 'or' also guards a stored NULL
+                    closed_at_dt = datetime.fromisoformat(closed_at_str)
+                    date_str = closed_at_dt.strftime('%Y-%m-%d')
+
+                    # Update token_stats
+                    token_upsert_query = """
+                        INSERT INTO token_stats (
+                            token, total_realized_pnl, total_completed_cycles, winning_cycles, losing_cycles,
+                            total_entry_volume, total_exit_volume, sum_of_winning_pnl, sum_of_losing_pnl,
+                            largest_winning_cycle_pnl, largest_losing_cycle_pnl,
+                            first_cycle_closed_at, last_cycle_closed_at, updated_at
+                        ) VALUES (?, ?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+                        ON CONFLICT(token) DO UPDATE SET
+                            total_realized_pnl = total_realized_pnl + excluded.total_realized_pnl,
+                            total_completed_cycles = total_completed_cycles + 1,
+                            winning_cycles = winning_cycles + excluded.winning_cycles,
+                            losing_cycles = losing_cycles + excluded.losing_cycles,
+                            total_entry_volume = total_entry_volume + excluded.total_entry_volume,
+                            total_exit_volume = total_exit_volume + excluded.total_exit_volume,
+                            sum_of_winning_pnl = sum_of_winning_pnl + excluded.sum_of_winning_pnl,
+                            sum_of_losing_pnl = sum_of_losing_pnl + excluded.sum_of_losing_pnl,
+                            largest_winning_cycle_pnl = MAX(largest_winning_cycle_pnl, excluded.largest_winning_cycle_pnl),
+                            largest_losing_cycle_pnl = MAX(largest_losing_cycle_pnl, excluded.largest_losing_cycle_pnl),
+                            -- COALESCE guards the NULL that scalar MIN/MAX would otherwise propagate
+                            -- (e.g. a token that so far only accumulated cancelled cycles)
+                            first_cycle_closed_at = MIN(COALESCE(first_cycle_closed_at, excluded.first_cycle_closed_at), excluded.first_cycle_closed_at),
+                            last_cycle_closed_at = MAX(COALESCE(last_cycle_closed_at, excluded.last_cycle_closed_at), excluded.last_cycle_closed_at),
+                            updated_at = excluded.updated_at
+                        """
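+                    # Note: in SQLite's ON CONFLICT ... DO UPDATE syntax, excluded.<col>
+                    # is the value this INSERT attempted to write, so each SET clause
+                    # above folds the new cycle into the running per-token totals.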
+                    is_win = 1 if realized_pnl > 0 else 0
+                    is_loss = 1 if realized_pnl < 0 else 0
+                    win_pnl_contrib = realized_pnl if realized_pnl > 0 else 0.0
+                    loss_pnl_contrib = abs(realized_pnl) if realized_pnl < 0 else 0.0
+
+                    self._execute_query(token_upsert_query, (
+                        token, realized_pnl, is_win, is_loss, entry_value, exit_value,
+                        win_pnl_contrib, loss_pnl_contrib, win_pnl_contrib, loss_pnl_contrib,
+                        closed_at_str, closed_at_str, now_iso
+                    ))
+
+                    # Update daily_aggregated_stats
+                    daily_upsert_query = """
+                        INSERT INTO daily_aggregated_stats (
+                            date, token, realized_pnl, completed_cycles, entry_volume, exit_volume
+                        ) VALUES (?, ?, ?, 1, ?, ?)
+                        ON CONFLICT(date, token) DO UPDATE SET
+                            realized_pnl = realized_pnl + excluded.realized_pnl,
+                            completed_cycles = completed_cycles + 1,
+                            entry_volume = entry_volume + excluded.entry_volume,
+                            exit_volume = exit_volume + excluded.exit_volume
+                        """
+                    self._execute_query(daily_upsert_query, (
+                        date_str, token, realized_pnl, entry_value, exit_value
+                    ))
+                    logger.info(f"Aggregated stats for closed trade lifecycle {trade_lifecycle_id} ({token}). PNL: {realized_pnl:.2f}")
+
+                elif status == 'cancelled':
+                    # Update token_stats for cancelled count
+                    cancelled_upsert_query = """
+                        INSERT INTO token_stats (token, total_cancelled_cycles, updated_at)
+                        VALUES (?, 1, ?)
+                        ON CONFLICT(token) DO UPDATE SET
+                            total_cancelled_cycles = total_cancelled_cycles + 1,
+                            updated_at = excluded.updated_at
+                        """
+                    self._execute_query(cancelled_upsert_query, (token, now_iso))
+                    logger.info(f"Incremented cancelled_cycles for {token} due to lifecycle {trade_lifecycle_id}.")
+
+                # Delete the original trade from the 'trades' table
+                self._execute_query("DELETE FROM trades WHERE trade_lifecycle_id = ?", (trade_lifecycle_id,))
+                logger.info(f"Deleted trade lifecycle {trade_lifecycle_id} from trades table after aggregation.")
+
+        except sqlite3.Error as e:
+            logger.error(f"Database error migrating trade {trade_lifecycle_id} to aggregate stats: {e}", exc_info=True)
+        except Exception as e:
+            logger.error(f"Unexpected error migrating trade {trade_lifecycle_id} to aggregate stats: {e}", exc_info=True)
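+
+    # Minimal call-site sketch (assumed, not part of this diff): once a lifecycle
+    # reaches 'position_closed' or 'cancelled' elsewhere in the class, fold it into
+    # the aggregate tables and drop its row from 'trades':
+    #   self._migrate_trade_to_aggregated_stats(lifecycle_id)  # lifecycle_id assumed in scope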
+
+    def purge_old_daily_aggregated_stats(self, months_to_keep: int = 10):
+        """Purge records from daily_aggregated_stats older than a specified number of months."""
+        if months_to_keep <= 0:
+            logger.info("Not purging daily_aggregated_stats as months_to_keep is not positive.")
+            return
+
+        try:
+            # Calculate the cutoff date.
+            # This is a bit simplified; for more precise month calculations, dateutil.relativedelta might be used.
+            # For SQLite, comparing YYYY-MM-DD strings works well.
+            cutoff_date = datetime.now(timezone.utc).date() - timedelta(days=months_to_keep * 30) # Approximate
+            cutoff_date_str = cutoff_date.strftime('%Y-%m-%d')
+
+            query = "DELETE FROM daily_aggregated_stats WHERE date < ?"
+
+            # To count before deleting (optional, for logging)
+            # count_query = "SELECT COUNT(*) as count FROM daily_aggregated_stats WHERE date < ?"
+            # before_count_row = self._fetchone_query(count_query, (cutoff_date_str,))
+            # num_to_delete = before_count_row['count'] if before_count_row else 0
+
+            with self.conn:
+                cursor = self.conn.cursor()
+                cursor.execute(query, (cutoff_date_str,))
+                rows_deleted = cursor.rowcount
+
+                if rows_deleted > 0:
+                    logger.info(f"Purged {rows_deleted} old records from daily_aggregated_stats (older than approx. {months_to_keep} months, before {cutoff_date_str}).")
+                else:
+                    logger.info(f"No old records found in daily_aggregated_stats to purge (older than approx. {months_to_keep} months, before {cutoff_date_str}).")
+
+        except sqlite3.Error as e:
+            logger.error(f"Database error purging old daily_aggregated_stats: {e}", exc_info=True)
+        except Exception as e:
+            logger.error(f"Unexpected error purging old daily_aggregated_stats: {e}", exc_info=True)
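+
+    # Hedged sketch of the more precise month arithmetic the comment above
+    # mentions, assuming the python-dateutil dependency is acceptable:
+    #   from dateutil.relativedelta import relativedelta
+    #   cutoff_date = datetime.now(timezone.utc).date() - relativedelta(months=months_to_keep)
+    # relativedelta subtracts calendar months, so the cutoff does not drift the
+    # way the 30-days-per-month approximation can over long retention windows.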