trading_stats.py 93 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415141614171418141914201421142214231424142514261427142814291430143114321433143414351436143714381439144014411442144314441445144614471448144914501451145214531454145514561457145814591460146114621463146414651466146714681469147014711472147314741475147614771478147914801481148214831484148514861487148814891490149114921493149414951496149714981499150015011502150315041505150615071508150915101511151215131514151515161517151815191520152115221523152415251526152715281529153015311532153315341535153615371538153915401541154215431544154515461547154815491550155115521553155415551556155715581559156015611562156315641565156615671568156915701571157215731574157515761577157815791580158115821583158415851586158715881589159015911592159315941595159615971598159916001601160216031604160516061607160816091610161116121613161416151616161716181619162016211622162316241625162616271628162916301631163216331634163516361637163816391640164116421643164416451646164716481649165016511652165316541655165616571658165916601661166216631664166516661667166816691670167116721673167416751676167716781679168016811682168316841685168616871688168916901691169216931694169516961697169816991700170117021703170417051706170717081709171017111712171317141715171617171718171917201721172217231724172517261727172817291730173117321733173417351736173717381739174017411742174317441745174617471748174917501751175217531754175517561757175817591760176117621763
  1. #!/usr/bin/env python3
  2. """
  3. Trading Statistics Tracker (SQLite Version)
  4. Tracks and calculates comprehensive trading statistics using an SQLite database.
  5. """
  6. import sqlite3
  7. import os
  8. import logging
  9. from datetime import datetime, timedelta, timezone
  10. from typing import Dict, List, Any, Optional, Tuple, Union
  11. import math
  12. from collections import defaultdict
  13. import uuid
  14. import numpy as np # Ensure numpy is imported as np
  15. # 🆕 Import the migration runner
  16. from src.migrations.migrate_db import run_migrations as run_db_migrations
  17. from src.utils.token_display_formatter import get_formatter # Added import
  18. logger = logging.getLogger(__name__)
  19. def _normalize_token_case(token: str) -> str:
  20. """
  21. Normalize token case: if any characters are already uppercase, keep as-is.
  22. Otherwise, convert to uppercase. This handles mixed-case tokens like kPEPE, kBONK.
  23. """
  24. # Check if any character is already uppercase
  25. if any(c.isupper() for c in token):
  26. return token # Keep original case for mixed-case tokens
  27. else:
  28. return token.upper() # Convert to uppercase for all-lowercase input
  29. class TradingStats:
  30. """Comprehensive trading statistics tracker using SQLite."""
    def __init__(self, db_path: str = "data/trading_stats.sqlite"):
        """Initialize the stats tracker and connect to SQLite DB.

        Order matters: migrations run against the DB file *before* the
        long-lived connection is opened, so the schema is already current
        when tables are ensured and metadata is seeded.

        Args:
            db_path: Path to the SQLite database file; parent directory is
                created if missing.
        """
        self.db_path = db_path
        self._ensure_data_directory()
        # 🆕 Run database migrations before connecting and creating tables
        # This ensures the schema is up-to-date when the connection is made
        # and tables are potentially created for the first time.
        logger.info("Running database migrations if needed...")
        run_db_migrations(self.db_path)  # Pass the correct db_path
        logger.info("Database migration check complete.")
        # PARSE_DECLTYPES / PARSE_COLNAMES let sqlite3 convert declared column
        # types back into Python objects where registered converters exist.
        self.conn = sqlite3.connect(self.db_path, detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
        self.conn.row_factory = self._dict_factory  # all fetches return dicts
        self._create_tables()  # CREATE IF NOT EXISTS will still be useful for first-time setup
        self._initialize_metadata()  # Also potentially sets schema_version if DB was just created
        # 🆕 Purge old daily aggregated stats on startup
        self.purge_old_daily_aggregated_stats()
  47. def _dict_factory(self, cursor, row):
  48. """Convert SQLite rows to dictionaries."""
  49. d = {}
  50. for idx, col in enumerate(cursor.description):
  51. d[col[0]] = row[idx]
  52. return d
  53. def _ensure_data_directory(self):
  54. """Ensure the data directory for the SQLite file exists."""
  55. data_dir = os.path.dirname(self.db_path)
  56. if data_dir and not os.path.exists(data_dir):
  57. os.makedirs(data_dir)
  58. logger.info(f"Created data directory for TradingStats DB: {data_dir}")
  59. def _execute_query(self, query: str, params: tuple = ()):
  60. """Execute a query (INSERT, UPDATE, DELETE)."""
  61. with self.conn:
  62. self.conn.execute(query, params)
  63. def _fetch_query(self, query: str, params: tuple = ()) -> List[Dict[str, Any]]:
  64. """Execute a SELECT query and fetch all results."""
  65. cur = self.conn.cursor()
  66. cur.execute(query, params)
  67. return cur.fetchall()
  68. def _fetchone_query(self, query: str, params: tuple = ()) -> Optional[Dict[str, Any]]:
  69. """Execute a SELECT query and fetch one result."""
  70. cur = self.conn.cursor()
  71. cur.execute(query, params)
  72. return cur.fetchone()
    def _create_tables(self):
        """Create SQLite tables and indexes if they don't exist.

        Idempotent: every statement uses IF NOT EXISTS, so this is safe to
        run on every startup. Schema *changes* are handled by the migration
        runner before this method is reached; this only covers first-time
        creation.
        """
        queries = [
            # Simple key/value store for bot-level metadata (start date, initial balance, ...)
            """
            CREATE TABLE IF NOT EXISTS metadata (
                key TEXT PRIMARY KEY,
                value TEXT
            )
            """,
            # Individual fills, extended (Phase 4) with position-lifecycle tracking columns
            """
            CREATE TABLE IF NOT EXISTS trades (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                exchange_fill_id TEXT UNIQUE,
                timestamp TEXT NOT NULL,
                symbol TEXT NOT NULL,
                side TEXT NOT NULL,
                amount REAL NOT NULL,
                price REAL NOT NULL,
                value REAL NOT NULL,
                trade_type TEXT NOT NULL,
                pnl REAL DEFAULT 0.0,
                linked_order_table_id INTEGER,
                -- 🆕 PHASE 4: Lifecycle tracking fields (merged from active_trades)
                status TEXT DEFAULT 'executed', -- 'pending', 'executed', 'position_opened', 'position_closed', 'cancelled'
                trade_lifecycle_id TEXT, -- Groups related trades into one lifecycle
                position_side TEXT, -- 'long', 'short', 'flat' - the resulting position side
                -- Position tracking
                entry_price REAL,
                current_position_size REAL DEFAULT 0,
                -- Order IDs (exchange IDs)
                entry_order_id TEXT,
                stop_loss_order_id TEXT,
                take_profit_order_id TEXT,
                -- Risk management
                stop_loss_price REAL,
                take_profit_price REAL,
                -- P&L tracking
                realized_pnl REAL DEFAULT 0,
                unrealized_pnl REAL DEFAULT 0,
                mark_price REAL DEFAULT 0,
                position_value REAL DEFAULT NULL,
                unrealized_pnl_percentage REAL DEFAULT NULL,
                -- Risk Info from Exchange
                liquidation_price REAL DEFAULT NULL,
                margin_used REAL DEFAULT NULL,
                leverage REAL DEFAULT NULL,
                -- Timestamps
                position_opened_at TEXT,
                position_closed_at TEXT,
                updated_at TEXT DEFAULT CURRENT_TIMESTAMP,
                -- Notes
                notes TEXT
            )
            """,
            # One balance snapshot per calendar date (see record_balance)
            """
            CREATE TABLE IF NOT EXISTS daily_balances (
                date TEXT PRIMARY KEY,
                balance REAL NOT NULL,
                timestamp TEXT NOT NULL
            )
            """,
            # Manual deposits/withdrawals so P&L can be separated from cash flow
            """
            CREATE TABLE IF NOT EXISTS balance_adjustments (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                adjustment_id TEXT UNIQUE,
                timestamp TEXT NOT NULL,
                type TEXT NOT NULL, -- 'deposit' or 'withdrawal'
                amount REAL NOT NULL, -- Always positive, type indicates direction
                description TEXT
            )
            """,
            # Orders placed by the bot; note: "TEXT NULLABLE" is parsed by SQLite
            # as a (harmless) multi-word type name, not a constraint — columns
            # are nullable by default anyway.
            """
            CREATE TABLE IF NOT EXISTS orders (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                bot_order_ref_id TEXT UNIQUE,
                exchange_order_id TEXT UNIQUE,
                symbol TEXT NOT NULL,
                side TEXT NOT NULL,
                type TEXT NOT NULL,
                amount_requested REAL NOT NULL,
                amount_filled REAL DEFAULT 0.0,
                price REAL, -- For limit, stop, etc.
                status TEXT NOT NULL, -- e.g., 'open', 'partially_filled', 'filled', 'cancelled', 'rejected', 'expired', 'pending_trigger'
                timestamp_created TEXT NOT NULL,
                timestamp_updated TEXT NOT NULL,
                parent_bot_order_ref_id TEXT NULLABLE -- To link conditional orders (like SL triggers) to their parent order
            )
            """,
            # Lookup indexes for the hot query paths used throughout this class
            """
            CREATE INDEX IF NOT EXISTS idx_orders_bot_order_ref_id ON orders (bot_order_ref_id);
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_orders_exchange_order_id ON orders (exchange_order_id);
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_trades_exchange_fill_id ON trades (exchange_fill_id);
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_trades_linked_order_table_id ON trades (linked_order_table_id);
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_orders_parent_bot_order_ref_id ON orders (parent_bot_order_ref_id);
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_orders_status_type ON orders (status, type);
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_trades_status ON trades (status);
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_trades_lifecycle_id ON trades (trade_lifecycle_id);
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_trades_position_side ON trades (position_side);
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_trades_symbol_status ON trades (symbol, status);
            """
        ]
        # 🆕 Add new table creation queries (aggregation tables that summarize
        # completed trade cycles; these back get_basic_stats / get_performance_stats)
        queries.extend([
            # Per-token lifetime aggregates over completed cycles
            """
            CREATE TABLE IF NOT EXISTS token_stats (
                token TEXT PRIMARY KEY,
                total_realized_pnl REAL DEFAULT 0.0,
                total_completed_cycles INTEGER DEFAULT 0,
                winning_cycles INTEGER DEFAULT 0,
                losing_cycles INTEGER DEFAULT 0,
                total_entry_volume REAL DEFAULT 0.0, -- Sum of (amount * entry_price) for completed cycles
                total_exit_volume REAL DEFAULT 0.0, -- Sum of (amount * exit_price) for completed cycles
                sum_of_winning_pnl REAL DEFAULT 0.0,
                sum_of_losing_pnl REAL DEFAULT 0.0, -- Stored as a positive value
                largest_winning_cycle_pnl REAL DEFAULT 0.0,
                largest_losing_cycle_pnl REAL DEFAULT 0.0, -- Stored as a positive value
                first_cycle_closed_at TEXT,
                last_cycle_closed_at TEXT,
                total_cancelled_cycles INTEGER DEFAULT 0, -- Count of lifecycles that ended in 'cancelled'
                updated_at TEXT DEFAULT CURRENT_TIMESTAMP
            )
            """,
            # Per-day, per-token aggregates; purged periodically (see purge_old_daily_aggregated_stats)
            """
            CREATE TABLE IF NOT EXISTS daily_aggregated_stats (
                date TEXT NOT NULL, -- YYYY-MM-DD
                token TEXT NOT NULL, -- Specific token or a general identifier like '_OVERALL_'
                realized_pnl REAL DEFAULT 0.0,
                completed_cycles INTEGER DEFAULT 0,
                entry_volume REAL DEFAULT 0.0,
                exit_volume REAL DEFAULT 0.0,
                PRIMARY KEY (date, token)
            )
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_daily_stats_date_token ON daily_aggregated_stats (date, token);
            """
        ])
        for query in queries:
            self._execute_query(query)
        logger.info("SQLite tables ensured for TradingStats.")
  231. def _initialize_metadata(self):
  232. """Initialize metadata if not already present."""
  233. start_date = self._get_metadata('start_date')
  234. initial_balance = self._get_metadata('initial_balance')
  235. if start_date is None:
  236. self._set_metadata('start_date', datetime.now(timezone.utc).isoformat())
  237. logger.info("Initialized 'start_date' in metadata.")
  238. if initial_balance is None:
  239. self._set_metadata('initial_balance', '0.0')
  240. logger.info("Initialized 'initial_balance' in metadata.")
  241. logger.info(f"TradingStats initialized. Start Date: {self._get_metadata('start_date')}, Initial Balance: {self._get_metadata('initial_balance')}")
  242. def _get_metadata(self, key: str) -> Optional[str]:
  243. """Retrieve a value from the metadata table."""
  244. row = self._fetchone_query("SELECT value FROM metadata WHERE key = ?", (key,))
  245. return row['value'] if row else None
  246. def _set_metadata(self, key: str, value: str):
  247. """Set a value in the metadata table."""
  248. self._execute_query("INSERT OR REPLACE INTO metadata (key, value) VALUES (?, ?)", (key, value))
  249. def set_initial_balance(self, balance: float):
  250. """Set the initial balance if not already set or zero."""
  251. current_initial_balance_str = self._get_metadata('initial_balance')
  252. current_initial_balance = float(current_initial_balance_str) if current_initial_balance_str else 0.0
  253. if current_initial_balance == 0.0: # Only set if it's effectively unset
  254. self._set_metadata('initial_balance', str(balance))
  255. # Also set start_date if it's the first time setting balance
  256. if self._get_metadata('start_date') is None or float(current_initial_balance_str if current_initial_balance_str else '0.0') == 0.0:
  257. self._set_metadata('start_date', datetime.now(timezone.utc).isoformat())
  258. formatter = get_formatter()
  259. logger.info(f"Initial balance set to: {formatter.format_price_with_symbol(balance)}")
  260. else:
  261. formatter = get_formatter()
  262. logger.info(f"Initial balance already set to {formatter.format_price_with_symbol(current_initial_balance)}. Not changing.")
  263. def record_balance(self, balance: float):
  264. """Record daily balance snapshot."""
  265. today_iso = datetime.now(timezone.utc).date().isoformat()
  266. now_iso = datetime.now(timezone.utc).isoformat()
  267. existing_entry = self._fetchone_query("SELECT date FROM daily_balances WHERE date = ?", (today_iso,))
  268. if existing_entry:
  269. self._execute_query("UPDATE daily_balances SET balance = ?, timestamp = ? WHERE date = ?",
  270. (balance, now_iso, today_iso))
  271. else:
  272. self._execute_query("INSERT INTO daily_balances (date, balance, timestamp) VALUES (?, ?, ?)",
  273. (today_iso, balance, now_iso))
  274. # logger.debug(f"Recorded balance for {today_iso}: ${balance:.2f}") # Potentially too verbose
  275. def record_trade(self, symbol: str, side: str, amount: float, price: float,
  276. exchange_fill_id: Optional[str] = None, trade_type: str = "manual",
  277. pnl: Optional[float] = None, timestamp: Optional[str] = None,
  278. linked_order_table_id_to_link: Optional[int] = None):
  279. """Record a trade in the database."""
  280. if timestamp is None:
  281. timestamp = datetime.now(timezone.utc).isoformat()
  282. value = amount * price
  283. self._execute_query(
  284. "INSERT OR IGNORE INTO trades (symbol, side, amount, price, value, trade_type, timestamp, exchange_fill_id, pnl, linked_order_table_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
  285. (symbol, side, amount, price, value, trade_type, timestamp, exchange_fill_id, pnl or 0.0, linked_order_table_id_to_link)
  286. )
  287. formatter = get_formatter()
  288. # Assuming symbol's base asset for amount formatting. If symbol is like BTC/USDT, base is BTC.
  289. base_asset_for_amount = symbol.split('/')[0] if '/' in symbol else symbol
  290. logger.info(f"📈 Trade recorded: {side.upper()} {formatter.format_amount(amount, base_asset_for_amount)} {symbol} @ {formatter.format_price(price, symbol)} ({formatter.format_price(value, symbol)}) [{trade_type}]")
  291. def get_all_trades(self) -> List[Dict[str, Any]]:
  292. """Fetch all trades from the database, ordered by timestamp."""
  293. return self._fetch_query("SELECT * FROM trades ORDER BY timestamp ASC")
  294. def get_trade_by_symbol_and_status(self, symbol: str, status: str) -> Optional[Dict[str, Any]]:
  295. """
  296. Fetches a single trade record for a given symbol and status.
  297. Typically used to find an open position master record.
  298. Assumes that for a given symbol, there's at most one trade record with a specific
  299. active status like 'position_opened'. If multiple could exist, this fetches the most recent.
  300. """
  301. query = "SELECT * FROM trades WHERE symbol = ? AND status = ? ORDER BY id DESC LIMIT 1"
  302. trade = self._fetchone_query(query, (symbol, status))
  303. if trade:
  304. logger.debug(f"Found trade for {symbol} with status {status}: ID {trade.get('id')}")
  305. # else: # Can be noisy if not finding a trade is a common occurrence
  306. # logger.debug(f"No trade found for {symbol} with status {status}")
  307. return trade
    def get_basic_stats(self, current_balance: Optional[float] = None) -> Dict[str, Any]:
        """Get basic trading statistics from DB, primarily using aggregated tables.

        Args:
            current_balance: Accepted for interface parity with callers
                (see get_comprehensive_stats); not used in the computation here.

        Returns:
            Dict with trade/cycle counts, realized P&L from closed cycles,
            start date, days active, last-activity timestamp, and the count
            of currently open positions.
        """
        # Get counts of open positions (trades that are not yet migrated)
        open_positions_count = self._get_open_positions_count_from_db()
        # Get overall aggregated stats from token_stats table
        query_token_stats_summary = """
            SELECT
                SUM(total_realized_pnl) as total_pnl_from_cycles,
                SUM(total_completed_cycles) as total_completed_cycles_sum,
                MIN(first_cycle_closed_at) as overall_first_cycle_closed,
                MAX(last_cycle_closed_at) as overall_last_cycle_closed
            FROM token_stats
        """
        token_stats_summary = self._fetchone_query(query_token_stats_summary)
        # SUM() over an empty table yields NULL, hence the explicit None guards.
        total_pnl_from_cycles = token_stats_summary['total_pnl_from_cycles'] if token_stats_summary and token_stats_summary['total_pnl_from_cycles'] is not None else 0.0
        total_completed_cycles_sum = token_stats_summary['total_completed_cycles_sum'] if token_stats_summary and token_stats_summary['total_completed_cycles_sum'] is not None else 0
        # Total trades considered as sum of completed cycles and currently open positions.
        # This redefines 'total_trades' from its previous meaning of individual fills.
        total_trades_redefined = total_completed_cycles_sum + open_positions_count
        initial_balance_str = self._get_metadata('initial_balance')
        initial_balance = float(initial_balance_str) if initial_balance_str else 0.0
        start_date_iso = self._get_metadata('start_date')
        start_date_obj = datetime.fromisoformat(start_date_iso) if start_date_iso else datetime.now(timezone.utc)
        # +1 so the very first day counts as day 1, not day 0
        days_active = (datetime.now(timezone.utc) - start_date_obj).days + 1
        # 'last_trade' timestamp could be the last update to token_stats or an open trade;
        # take whichever of the two is most recent (ISO-8601 strings compared as datetimes).
        last_activity_ts = token_stats_summary['overall_last_cycle_closed'] if token_stats_summary else None
        last_open_trade_ts_row = self._fetchone_query("SELECT MAX(updated_at) as last_update FROM trades WHERE status = 'position_opened'")
        if last_open_trade_ts_row and last_open_trade_ts_row['last_update']:
            if not last_activity_ts or datetime.fromisoformat(last_open_trade_ts_row['last_update']) > datetime.fromisoformat(last_activity_ts):
                last_activity_ts = last_open_trade_ts_row['last_update']
        # Buy/Sell trades count from individual fills is no longer directly available for
        # completed cycles. If needed, this requires schema change in token_stats or a
        # different approach. For now, these are omitted from basic_stats.
        return {
            'total_trades': total_trades_redefined, # This is now cycles + open positions
            'completed_trades': total_completed_cycles_sum, # This is sum of total_completed_cycles from token_stats
            # 'buy_trades': buy_trades_count, # Omitted
            # 'sell_trades': sell_trades_count, # Omitted
            'initial_balance': initial_balance,
            'total_pnl': total_pnl_from_cycles, # PNL from closed cycles via token_stats
            'days_active': days_active,
            'start_date': start_date_obj.strftime('%Y-%m-%d'),
            'last_trade': last_activity_ts, # Reflects last known activity (cycle close or open trade update)
            'open_positions_count': open_positions_count
        }
    def get_performance_stats(self) -> Dict[str, Any]:
        """Calculate advanced performance statistics using aggregated data from token_stats.

        Returns win rate, profit factor, average/largest win and loss,
        expectancy, total exit volume, and the best/worst performing tokens
        by P&L percentage. Loss figures are positive magnitudes — they are
        stored positive in token_stats.
        """
        query = """
            SELECT
                SUM(total_completed_cycles) as total_cycles,
                SUM(winning_cycles) as total_wins,
                SUM(losing_cycles) as total_losses,
                SUM(sum_of_winning_pnl) as total_winning_pnl,
                SUM(sum_of_losing_pnl) as total_losing_pnl, -- Stored positive
                MAX(largest_winning_cycle_pnl) as overall_largest_win,
                MAX(largest_losing_cycle_pnl) as overall_largest_loss -- Stored positive
            FROM token_stats
        """
        summary = self._fetchone_query(query)
        # Add total volume (exit-side notional across all completed cycles)
        volume_summary = self._fetchone_query("SELECT SUM(total_exit_volume) as total_volume FROM token_stats")
        total_trading_volume = volume_summary['total_volume'] if volume_summary and volume_summary['total_volume'] is not None else 0.0
        # Get individual token performances for best/worst
        all_token_perf_stats = self.get_token_performance()
        best_token_pnl_pct = -float('inf')
        best_token_name = "N/A"
        worst_token_pnl_pct = float('inf')
        worst_token_name = "N/A"
        if all_token_perf_stats:
            for token_name_iter, stats_data in all_token_perf_stats.items():
                pnl_pct = stats_data.get('pnl_percentage', 0.0)
                # Ensure token has completed trades and pnl_pct is a valid number
                if stats_data.get('completed_trades', 0) > 0 and isinstance(pnl_pct, (int, float)) and not math.isinf(pnl_pct) and not math.isnan(pnl_pct):
                    if pnl_pct > best_token_pnl_pct:
                        best_token_pnl_pct = pnl_pct
                        best_token_name = token_name_iter
                    if pnl_pct < worst_token_pnl_pct:
                        worst_token_pnl_pct = pnl_pct
                        worst_token_name = token_name_iter
        # Handle cases where no valid tokens were found for best/worst
        if best_token_name == "N/A":
            best_token_pnl_pct = 0.0
        if worst_token_name == "N/A":
            worst_token_pnl_pct = 0.0
        # No completed cycles yet: return a zeroed structure with the same shape.
        if not summary or summary['total_cycles'] is None or summary['total_cycles'] == 0:
            return {
                'win_rate': 0.0, 'profit_factor': 0.0, 'avg_win': 0.0, 'avg_loss': 0.0,
                'largest_win': 0.0, 'largest_loss': 0.0,
                'total_wins': 0, 'total_losses': 0, 'expectancy': 0.0,
                'total_trading_volume': total_trading_volume,
                'best_performing_token': {'name': best_token_name, 'pnl_percentage': best_token_pnl_pct},
                'worst_performing_token': {'name': worst_token_name, 'pnl_percentage': worst_token_pnl_pct},
            }
        total_completed_count = summary['total_cycles']
        total_wins_count = summary['total_wins'] if summary['total_wins'] is not None else 0
        total_losses_count = summary['total_losses'] if summary['total_losses'] is not None else 0
        win_rate = (total_wins_count / total_completed_count * 100) if total_completed_count > 0 else 0.0
        sum_of_wins = summary['total_winning_pnl'] if summary['total_winning_pnl'] is not None else 0.0
        sum_of_losses = summary['total_losing_pnl'] if summary['total_losing_pnl'] is not None else 0.0 # This is sum of absolute losses
        # profit factor is +inf when there are wins but no losses at all
        profit_factor = (sum_of_wins / sum_of_losses) if sum_of_losses > 0 else float('inf') if sum_of_wins > 0 else 0.0
        avg_win = (sum_of_wins / total_wins_count) if total_wins_count > 0 else 0.0
        avg_loss = (sum_of_losses / total_losses_count) if total_losses_count > 0 else 0.0 # Avg of absolute losses
        largest_win = summary['overall_largest_win'] if summary['overall_largest_win'] is not None else 0.0
        largest_loss = summary['overall_largest_loss'] if summary['overall_largest_loss'] is not None else 0.0 # Largest absolute loss
        # Consecutive wins/losses removed as it's hard to track with this aggregation model.
        # Expectancy: average P&L per cycle = P(win)*avg_win - P(loss)*avg_loss.
        expectancy = (avg_win * (win_rate / 100)) - (avg_loss * (1 - (win_rate / 100)))
        return {
            'win_rate': win_rate, 'profit_factor': profit_factor, 'avg_win': avg_win, 'avg_loss': avg_loss,
            'largest_win': largest_win, 'largest_loss': largest_loss,
            'total_wins': total_wins_count, 'total_losses': total_losses_count, 'expectancy': expectancy,
            'total_trading_volume': total_trading_volume,
            'best_performing_token': {'name': best_token_name, 'pnl_percentage': best_token_pnl_pct},
            'worst_performing_token': {'name': worst_token_name, 'pnl_percentage': worst_token_pnl_pct},
        }
    def get_risk_metrics(self) -> Dict[str, Any]:
        """Calculate risk-adjusted metrics from daily balances.

        Uses day-over-day balance changes as the return series; requires at
        least two recorded balances, otherwise all metrics are 0.0.
        Sharpe/Sortino/volatility are annualized with sqrt(365) (markets here
        trade every day). Drawdown, volatility and VaR are percentages.
        """
        daily_balances_data = self._fetch_query("SELECT balance FROM daily_balances ORDER BY date ASC")
        if not daily_balances_data or len(daily_balances_data) < 2:
            return {'sharpe_ratio': 0.0, 'sortino_ratio': 0.0, 'max_drawdown': 0.0, 'volatility': 0.0, 'var_95': 0.0}
        balances = [entry['balance'] for entry in daily_balances_data]
        returns = np.diff(balances) / balances[:-1] # Calculate daily returns
        returns = returns[np.isfinite(returns)] # Remove NaNs or Infs if any balance was 0
        if returns.size == 0:
            return {'sharpe_ratio': 0.0, 'sortino_ratio': 0.0, 'max_drawdown': 0.0, 'volatility': 0.0, 'var_95': 0.0}
        risk_free_rate_daily = (1 + 0.02)**(1/365) - 1 # Approx 2% annual risk-free rate, daily
        excess_returns = returns - risk_free_rate_daily
        # Guard the zero-variance case (flat balance history) to avoid division by zero.
        sharpe_ratio = np.mean(excess_returns) / np.std(returns) * np.sqrt(365) if np.std(returns) > 0 else 0.0
        # Sortino penalizes only downside deviation (negative-return days)
        downside_returns = returns[returns < 0]
        downside_std = np.std(downside_returns) if len(downside_returns) > 0 else 0.0
        sortino_ratio = np.mean(excess_returns) / downside_std * np.sqrt(365) if downside_std > 0 else 0.0
        # Max drawdown: largest peak-to-trough drop of the compounded equity curve
        cumulative_returns = np.cumprod(1 + returns)
        peak = np.maximum.accumulate(cumulative_returns)
        drawdown = (cumulative_returns - peak) / peak
        max_drawdown_pct = abs(np.min(drawdown) * 100) if drawdown.size > 0 else 0.0
        volatility_pct = np.std(returns) * np.sqrt(365) * 100
        # Historical 95% VaR: magnitude of the 5th-percentile daily return
        var_95_pct = abs(np.percentile(returns, 5) * 100) if returns.size > 0 else 0.0
        return {
            'sharpe_ratio': sharpe_ratio, 'sortino_ratio': sortino_ratio,
            'max_drawdown': max_drawdown_pct, 'volatility': volatility_pct, 'var_95': var_95_pct
        }
    def get_comprehensive_stats(self, current_balance: Optional[float] = None) -> Dict[str, Any]:
        """Get all statistics combined.

        Args:
            current_balance: Live account balance. When provided it is
                snapshotted into daily_balances and used for the total-return
                calculation (so unrealized P&L is included); otherwise the
                return is estimated from initial balance + realized P&L only.

        Returns:
            Dict with 'basic', 'performance' and 'risk' sub-dicts plus the
            effective current balance, total return percentage, and a
            'last_updated' UTC timestamp.
        """
        if current_balance is not None: # Ensure it's not just None, but explicitly provided
            self.record_balance(current_balance) # Record current balance for today
        basic = self.get_basic_stats(current_balance) # Pass current_balance for P&L context if needed
        performance = self.get_performance_stats()
        risk = self.get_risk_metrics()
        initial_balance = basic['initial_balance']
        total_return_pct = 0.0
        # Use current_balance if available and valid for total return calculation.
        # Otherwise, PNL from basic_stats (closed trades) is the primary PNL source.
        # Note: current_balance reflects unrealized PNL too, so the two paths
        # can legitimately differ while positions are open.
        effective_balance_for_return = current_balance if current_balance is not None else (initial_balance + basic['total_pnl'])
        if initial_balance > 0:
            total_return_pct = ((effective_balance_for_return - initial_balance) / initial_balance) * 100
        return {
            'basic': basic,
            'performance': performance,
            'risk': risk,
            'current_balance': current_balance if current_balance is not None else initial_balance + basic['total_pnl'], # Best estimate
            'total_return': total_return_pct, # Percentage
            'last_updated': datetime.now(timezone.utc).isoformat()
        }
  472. def _get_open_positions_count_from_db(self) -> int:
  473. """🧹 PHASE 4: Get count of open positions from enhanced trades table."""
  474. row = self._fetchone_query("SELECT COUNT(DISTINCT symbol) as count FROM trades WHERE status = 'position_opened'")
  475. return row['count'] if row else 0
def format_stats_message(self, current_balance: Optional[float] = None) -> str:
    """Format stats for Telegram display using data from DB.

    Builds an HTML-formatted multi-line message from get_comprehensive_stats().
    On any failure, returns a short error message instead of raising, so the
    bot can always send *something* to the user.
    """
    try:
        stats = self.get_comprehensive_stats(current_balance)
        formatter = get_formatter()
        basic = stats['basic']
        perf = stats['performance']
        risk = stats['risk'] # For portfolio drawdown
        effective_current_balance = stats['current_balance']
        initial_bal = basic['initial_balance']
        # When a live balance is supplied, P&L includes unrealized gains
        # (balance delta); otherwise fall back to realized P&L only.
        total_pnl_val = effective_current_balance - initial_bal if initial_bal > 0 and current_balance is not None else basic['total_pnl']
        total_return_pct = (total_pnl_val / initial_bal * 100) if initial_bal > 0 else 0.0
        pnl_emoji = "🟢" if total_pnl_val >= 0 else "🔴"
        open_positions_count = basic['open_positions_count']
        stats_text_parts = []
        stats_text_parts.append(f"📊 <b>Trading Statistics</b>\n")
        # Account Overview
        stats_text_parts.append(f"\n💰 <b>Account Overview:</b>")
        stats_text_parts.append(f"• Current Balance: {formatter.format_price_with_symbol(effective_current_balance)}")
        stats_text_parts.append(f"• Initial Balance: {formatter.format_price_with_symbol(initial_bal)}")
        stats_text_parts.append(f"• Open Positions: {open_positions_count}")
        stats_text_parts.append(f"• {pnl_emoji} Total P&L: {formatter.format_price_with_symbol(total_pnl_val)} ({total_return_pct:+.2f}%)")
        stats_text_parts.append(f"• Days Active: {basic['days_active']}\n")
        # Performance Metrics
        stats_text_parts.append(f"\n🏆 <b>Performance Metrics:</b>")
        stats_text_parts.append(f"• Total Completed Trades: {basic['completed_trades']}")
        stats_text_parts.append(f"• Trading Volume (Exit Vol.): {formatter.format_price_with_symbol(perf.get('total_trading_volume', 0.0))}")
        stats_text_parts.append(f"• Profit Factor: {perf['profit_factor']:.2f}")
        stats_text_parts.append(f"• Expectancy: {formatter.format_price_with_symbol(perf['expectancy'])} (Value per trade)")
        # Note for Expectancy Percentage: "[Info: Percentage representation requires further definition]" might be too verbose for typical display.
        stats_text_parts.append(f"• Largest Winning Trade: {formatter.format_price_with_symbol(perf['largest_win'])} (Value)")
        stats_text_parts.append(f"• Largest Losing Trade: {formatter.format_price_with_symbol(perf['largest_loss'])} (Value)")
        # Note for Largest Trade P&L %: Similar to expectancy, noting "[Info: P&L % for specific trades requires data enhancement]" in the bot message might be too much.
        best_token_stats = perf.get('best_performing_token', {'name': 'N/A', 'pnl_percentage': 0.0})
        worst_token_stats = perf.get('worst_performing_token', {'name': 'N/A', 'pnl_percentage': 0.0})
        stats_text_parts.append(f"• Best Performing Token: {best_token_stats['name']} ({best_token_stats['pnl_percentage']:+.2f}%)")
        stats_text_parts.append(f"• Worst Performing Token: {worst_token_stats['name']} ({worst_token_stats['pnl_percentage']:+.2f}%)")
        stats_text_parts.append(f"• Average Trade Duration: N/A <i>(Data collection required)</i>")
        stats_text_parts.append(f"• Portfolio Max Drawdown: {risk['max_drawdown']:.2f}% <i>(Daily Balance based)</i>")
        # Future note: "[Info: Trading P&L specific drawdown analysis planned]"
        # Session Info
        stats_text_parts.append(f"\n\n⏰ <b>Session Info:</b>")
        stats_text_parts.append(f"• Bot Started: {basic['start_date']}")
        stats_text_parts.append(f"• Stats Last Updated: {datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC')}")
        return "\n".join(stats_text_parts).strip()
    except Exception as e:
        # Never propagate: the caller sends whatever string this returns.
        logger.error(f"Error formatting stats message: {e}", exc_info=True)
        return f"""📊 <b>Trading Statistics</b>\n\n❌ <b>Error loading statistics</b>\n\n🔧 <b>Debug info:</b> {str(e)[:100]}"""
  524. def get_recent_trades(self, limit: int = 10) -> List[Dict[str, Any]]:
  525. """Get recent trades from DB (these are active/open trades, as completed ones are migrated)."""
  526. return self._fetch_query("SELECT * FROM trades WHERE status = 'position_opened' ORDER BY updated_at DESC LIMIT ?", (limit,))
  527. def get_token_performance(self) -> Dict[str, Dict[str, Any]]:
  528. """Get performance statistics grouped by token using the token_stats table."""
  529. all_token_stats = self._fetch_query("SELECT * FROM token_stats ORDER BY token ASC")
  530. token_performance_map = {}
  531. for record in all_token_stats:
  532. token = record['token']
  533. total_pnl = record.get('total_realized_pnl', 0.0)
  534. # total_volume_sold now refers to total_exit_volume from token_stats
  535. total_volume = record.get('total_exit_volume', 0.0)
  536. pnl_percentage = (total_pnl / total_volume * 100) if total_volume > 0 else 0.0
  537. total_completed_count = record.get('total_completed_cycles', 0)
  538. total_wins_count = record.get('winning_cycles', 0)
  539. total_losses_count = record.get('losing_cycles', 0)
  540. win_rate = (total_wins_count / total_completed_count * 100) if total_completed_count > 0 else 0.0
  541. sum_of_wins = record.get('sum_of_winning_pnl', 0.0)
  542. sum_of_losses = record.get('sum_of_losing_pnl', 0.0) # Stored positive
  543. profit_factor = (sum_of_wins / sum_of_losses) if sum_of_losses > 0 else float('inf') if sum_of_wins > 0 else 0.0
  544. avg_win = (sum_of_wins / total_wins_count) if total_wins_count > 0 else 0.0
  545. avg_loss = (sum_of_losses / total_losses_count) if total_losses_count > 0 else 0.0
  546. expectancy = (avg_win * (win_rate / 100)) - (avg_loss * (1 - (win_rate / 100)))
  547. largest_win = record.get('largest_winning_cycle_pnl', 0.0)
  548. largest_loss = record.get('largest_losing_cycle_pnl', 0.0) # Stored positive
  549. token_performance_map[token] = {
  550. 'token': token, # Added for easier access if iterating over values
  551. 'total_pnl': total_pnl,
  552. 'pnl_percentage': pnl_percentage,
  553. 'completed_trades': total_completed_count,
  554. 'total_volume': total_volume, # This is total_exit_volume
  555. 'win_rate': win_rate,
  556. 'total_wins': total_wins_count,
  557. 'total_losses': total_losses_count,
  558. 'profit_factor': profit_factor,
  559. 'expectancy': expectancy,
  560. 'largest_win': largest_win,
  561. 'largest_loss': largest_loss,
  562. 'avg_win': avg_win,
  563. 'avg_loss': avg_loss,
  564. 'first_cycle_closed_at': record.get('first_cycle_closed_at'),
  565. 'last_cycle_closed_at': record.get('last_cycle_closed_at'),
  566. 'total_cancelled_cycles': record.get('total_cancelled_cycles', 0)
  567. }
  568. return token_performance_map
def get_token_detailed_stats(self, token: str) -> Dict[str, Any]:
    """Get detailed statistics for a specific token using token_stats and current open trades.

    Combines the aggregated history from `token_stats` (completed cycles)
    with any still-open trades for the token from the `trades` table, and
    returns a single dict suitable for display.
    """
    upper_token = _normalize_token_case(token)
    # Get aggregated performance from token_stats
    token_agg_stats = self._fetchone_query("SELECT * FROM token_stats WHERE token = ?", (upper_token,))
    # Get currently open trades for this token from the 'trades' table (not yet migrated)
    # These are not completed cycles but represent current exposure.
    open_trades_for_token = self._fetch_query(
        "SELECT * FROM trades WHERE symbol LIKE ? AND status = 'position_opened' ORDER BY timestamp ASC",
        (f"{upper_token}/%",)
    )
    if not token_agg_stats and not open_trades_for_token:
        # Nothing known about this token at all.
        return {
            'token': upper_token, 'total_trades': 0, 'total_pnl': 0.0, 'win_rate': 0.0,
            'message': f"No trading history or open positions found for {upper_token}"
        }
    # Initialize with empty performance if no aggregated data
    perf_stats = {}
    if token_agg_stats:
        perf_stats = {
            'completed_trades': token_agg_stats.get('total_completed_cycles', 0),
            'total_pnl': token_agg_stats.get('total_realized_pnl', 0.0),
            'pnl_percentage': 0.0, # Recalculate if needed, or store avg pnl_percentage
            'win_rate': 0.0,
            'profit_factor': token_agg_stats.get('profit_factor'), # Placeholder, need to calc from sums
            'avg_win': 0.0,
            'avg_loss': 0.0,
            'largest_win': token_agg_stats.get('largest_winning_cycle_pnl', 0.0),
            'largest_loss': token_agg_stats.get('largest_losing_cycle_pnl', 0.0),
            'expectancy': 0.0,
            'total_wins': token_agg_stats.get('winning_cycles',0),
            'total_losses': token_agg_stats.get('losing_cycles',0),
            'completed_entry_volume': token_agg_stats.get('total_entry_volume', 0.0),
            'completed_exit_volume': token_agg_stats.get('total_exit_volume', 0.0),
            'total_cancelled': token_agg_stats.get('total_cancelled_cycles', 0)
        }
        # Derive the ratio metrics only when at least one cycle completed,
        # so the zero-trade case keeps the neutral defaults above.
        if perf_stats['completed_trades'] > 0:
            perf_stats['win_rate'] = (perf_stats['total_wins'] / perf_stats['completed_trades'] * 100) if perf_stats['completed_trades'] > 0 else 0.0
            sum_wins = token_agg_stats.get('sum_of_winning_pnl', 0.0)
            sum_losses = token_agg_stats.get('sum_of_losing_pnl', 0.0)  # stored positive
            perf_stats['profit_factor'] = (sum_wins / sum_losses) if sum_losses > 0 else float('inf') if sum_wins > 0 else 0.0
            perf_stats['avg_win'] = (sum_wins / perf_stats['total_wins']) if perf_stats['total_wins'] > 0 else 0.0
            perf_stats['avg_loss'] = (sum_losses / perf_stats['total_losses']) if perf_stats['total_losses'] > 0 else 0.0
            perf_stats['expectancy'] = (perf_stats['avg_win'] * (perf_stats['win_rate'] / 100)) - (perf_stats['avg_loss'] * (1 - (perf_stats['win_rate'] / 100)))
            if perf_stats['completed_exit_volume'] > 0:
                perf_stats['pnl_percentage'] = (perf_stats['total_pnl'] / perf_stats['completed_exit_volume'] * 100)
    else: # No completed cycles for this token yet
        perf_stats = {
            'completed_trades': 0, 'total_pnl': 0.0, 'pnl_percentage': 0.0, 'win_rate': 0.0,
            'profit_factor': 0.0, 'avg_win': 0.0, 'avg_loss': 0.0, 'largest_win': 0.0, 'largest_loss': 0.0,
            'expectancy': 0.0, 'total_wins':0, 'total_losses':0,
            'completed_entry_volume': 0.0, 'completed_exit_volume': 0.0, 'total_cancelled': 0
        }
    # Info about open positions for this token (raw trades, not cycles)
    open_positions_summary = []
    total_open_value = 0.0
    total_open_unrealized_pnl = 0.0
    for op_trade in open_trades_for_token:
        open_positions_summary.append({
            'lifecycle_id': op_trade.get('trade_lifecycle_id'),
            'side': op_trade.get('position_side'),
            'amount': op_trade.get('current_position_size'),
            'entry_price': op_trade.get('entry_price'),
            'mark_price': op_trade.get('mark_price'),
            'unrealized_pnl': op_trade.get('unrealized_pnl'),
            'opened_at': op_trade.get('position_opened_at')
        })
        total_open_value += op_trade.get('value', 0.0) # Initial value of open positions
        total_open_unrealized_pnl += op_trade.get('unrealized_pnl', 0.0)
    # Raw individual orders from 'orders' table for this token can be complex to summarize here
    # The old version counted 'buy_orders' and 'sell_orders' from all trades for the token.
    # This is no longer straightforward for completed cycles.
    # We can count open orders for this token.
    open_orders_count_row = self._fetchone_query(
        "SELECT COUNT(*) as count FROM orders WHERE symbol LIKE ? AND status IN ('open', 'submitted', 'pending_trigger')",
        (f"{upper_token}/%",)
    )
    current_open_orders_for_token = open_orders_count_row['count'] if open_orders_count_row else 0
    # 'total_trades' here could mean total orders ever placed for this token, or completed cycles + open positions
    # Let's define it as completed cycles + number of currently open positions for consistency with get_basic_stats
    effective_total_trades = perf_stats['completed_trades'] + len(open_trades_for_token)
    return {
        'token': upper_token,
        'message': f"Statistics for {upper_token}",
        'performance_summary': perf_stats, # From token_stats table
        'open_positions': open_positions_summary, # List of currently open positions
        'open_positions_count': len(open_trades_for_token),
        'current_open_orders_count': current_open_orders_for_token,
        'summary_total_trades': effective_total_trades, # Completed cycles + open positions
        'summary_total_realized_pnl': perf_stats['total_pnl'],
        'summary_total_unrealized_pnl': total_open_unrealized_pnl,
        # 'cycles': token_cycles # Raw cycle data for completed trades is no longer stored directly after migration
    }
  662. def get_daily_stats(self, limit: int = 10) -> List[Dict[str, Any]]:
  663. """Get daily performance stats for the last N days from daily_aggregated_stats."""
  664. daily_stats_list = []
  665. today_utc = datetime.now(timezone.utc).date()
  666. for i in range(limit):
  667. target_date = today_utc - timedelta(days=i)
  668. date_str = target_date.strftime('%Y-%m-%d')
  669. date_formatted = target_date.strftime('%m/%d') # For display
  670. # Query for all tokens for that day and sum them up
  671. # Or, if daily_aggregated_stats stores an _OVERALL_ record, query that.
  672. # Assuming for now we sum up all token records for a given day.
  673. day_aggregated_data = self._fetch_query(
  674. "SELECT SUM(realized_pnl) as pnl, SUM(completed_cycles) as trades, SUM(exit_volume) as volume FROM daily_aggregated_stats WHERE date = ?",
  675. (date_str,)
  676. )
  677. stats_for_day = None
  678. if day_aggregated_data and len(day_aggregated_data) > 0 and day_aggregated_data[0]['trades'] is not None:
  679. stats_for_day = day_aggregated_data[0]
  680. # Calculate pnl_pct if volume is present and positive
  681. pnl = stats_for_day.get('pnl', 0.0) or 0.0
  682. volume = stats_for_day.get('volume', 0.0) or 0.0
  683. stats_for_day['pnl_pct'] = (pnl / volume * 100) if volume > 0 else 0.0
  684. # Ensure trades is an int
  685. stats_for_day['trades'] = int(stats_for_day.get('trades', 0) or 0)
  686. if stats_for_day and stats_for_day['trades'] > 0:
  687. daily_stats_list.append({
  688. 'date': date_str, 'date_formatted': date_formatted, 'has_trades': True,
  689. **stats_for_day
  690. })
  691. else:
  692. daily_stats_list.append({
  693. 'date': date_str, 'date_formatted': date_formatted, 'has_trades': False,
  694. 'trades': 0, 'pnl': 0.0, 'volume': 0.0, 'pnl_pct': 0.0
  695. })
  696. return daily_stats_list
  697. def get_weekly_stats(self, limit: int = 10) -> List[Dict[str, Any]]:
  698. """Get weekly performance stats for the last N weeks by aggregating daily_aggregated_stats."""
  699. weekly_stats_list = []
  700. today_utc = datetime.now(timezone.utc).date()
  701. for i in range(limit):
  702. target_monday = today_utc - timedelta(days=today_utc.weekday() + (i * 7))
  703. target_sunday = target_monday + timedelta(days=6)
  704. week_key_display = f"{target_monday.strftime('%Y-W%W')}" # For internal key if needed
  705. week_formatted_display = f"{target_monday.strftime('%m/%d')}-{target_sunday.strftime('%m/%d/%y')}"
  706. # Fetch daily records for this week range
  707. daily_records_for_week = self._fetch_query(
  708. "SELECT date, realized_pnl, completed_cycles, exit_volume FROM daily_aggregated_stats WHERE date BETWEEN ? AND ?",
  709. (target_monday.strftime('%Y-%m-%d'), target_sunday.strftime('%Y-%m-%d'))
  710. )
  711. if daily_records_for_week:
  712. total_pnl_week = sum(d.get('realized_pnl', 0.0) or 0.0 for d in daily_records_for_week)
  713. total_trades_week = sum(d.get('completed_cycles', 0) or 0 for d in daily_records_for_week)
  714. total_volume_week = sum(d.get('exit_volume', 0.0) or 0.0 for d in daily_records_for_week)
  715. pnl_pct_week = (total_pnl_week / total_volume_week * 100) if total_volume_week > 0 else 0.0
  716. if total_trades_week > 0:
  717. weekly_stats_list.append({
  718. 'week': week_key_display,
  719. 'week_formatted': week_formatted_display,
  720. 'has_trades': True,
  721. 'pnl': total_pnl_week,
  722. 'trades': total_trades_week,
  723. 'volume': total_volume_week,
  724. 'pnl_pct': pnl_pct_week
  725. })
  726. else:
  727. weekly_stats_list.append({
  728. 'week': week_key_display, 'week_formatted': week_formatted_display, 'has_trades': False,
  729. 'trades': 0, 'pnl': 0.0, 'volume': 0.0, 'pnl_pct': 0.0
  730. })
  731. else:
  732. weekly_stats_list.append({
  733. 'week': week_key_display, 'week_formatted': week_formatted_display, 'has_trades': False,
  734. 'trades': 0, 'pnl': 0.0, 'volume': 0.0, 'pnl_pct': 0.0
  735. })
  736. return weekly_stats_list
def get_monthly_stats(self, limit: int = 10) -> List[Dict[str, Any]]:
    """Get monthly performance stats for the last N months by aggregating daily_aggregated_stats.

    Months are calendar months in UTC; months with no completed cycles get
    a zeroed entry flagged has_trades=False.
    """
    monthly_stats_list = []
    current_month_start_utc = datetime.now(timezone.utc).date().replace(day=1)
    for i in range(limit):
        # Step back i months, normalizing month<=0 into the previous year(s).
        year = current_month_start_utc.year
        month = current_month_start_utc.month - i
        while month <= 0:
            month += 12
            year -= 1
        target_month_start_date = datetime(year, month, 1, tzinfo=timezone.utc).date()
        # Find end of target month
        # (first day of the following month minus one day; December rolls into January)
        next_month_start_date = datetime(year + (month // 12), (month % 12) + 1, 1, tzinfo=timezone.utc).date() if month < 12 else datetime(year + 1, 1, 1, tzinfo=timezone.utc).date()
        target_month_end_date = next_month_start_date - timedelta(days=1)
        month_key_display = target_month_start_date.strftime('%Y-%m')
        month_formatted_display = target_month_start_date.strftime('%b %Y')
        daily_records_for_month = self._fetch_query(
            "SELECT date, realized_pnl, completed_cycles, exit_volume FROM daily_aggregated_stats WHERE date BETWEEN ? AND ?",
            (target_month_start_date.strftime('%Y-%m-%d'), target_month_end_date.strftime('%Y-%m-%d'))
        )
        if daily_records_for_month:
            # `or 0`/`or 0.0` guards against NULL columns coming back as None.
            total_pnl_month = sum(d.get('realized_pnl', 0.0) or 0.0 for d in daily_records_for_month)
            total_trades_month = sum(d.get('completed_cycles', 0) or 0 for d in daily_records_for_month)
            total_volume_month = sum(d.get('exit_volume', 0.0) or 0.0 for d in daily_records_for_month)
            pnl_pct_month = (total_pnl_month / total_volume_month * 100) if total_volume_month > 0 else 0.0
            if total_trades_month > 0:
                monthly_stats_list.append({
                    'month': month_key_display,
                    'month_formatted': month_formatted_display,
                    'has_trades': True,
                    'pnl': total_pnl_month,
                    'trades': total_trades_month,
                    'volume': total_volume_month,
                    'pnl_pct': pnl_pct_month
                })
            else:
                monthly_stats_list.append({
                    'month': month_key_display, 'month_formatted': month_formatted_display, 'has_trades': False,
                    'trades': 0, 'pnl': 0.0, 'volume': 0.0, 'pnl_pct': 0.0
                })
        else:
            monthly_stats_list.append({
                'month': month_key_display, 'month_formatted': month_formatted_display, 'has_trades': False,
                'trades': 0, 'pnl': 0.0, 'volume': 0.0, 'pnl_pct': 0.0
            })
    return monthly_stats_list
  783. def record_deposit(self, amount: float, timestamp: Optional[str] = None,
  784. deposit_id: Optional[str] = None, description: Optional[str] = None):
  785. """Record a deposit."""
  786. ts = timestamp if timestamp else datetime.now(timezone.utc).isoformat()
  787. formatter = get_formatter()
  788. formatted_amount_str = formatter.format_price_with_symbol(amount)
  789. desc = description if description else f'Deposit of {formatted_amount_str}'
  790. self._execute_query(
  791. "INSERT INTO balance_adjustments (adjustment_id, timestamp, type, amount, description) VALUES (?, ?, ?, ?, ?)",
  792. (deposit_id or str(uuid.uuid4()), ts, 'deposit', amount, desc) # Ensured uuid is string
  793. )
  794. # Adjust initial_balance in metadata to reflect capital changes
  795. current_initial = float(self._get_metadata('initial_balance') or '0.0')
  796. self._set_metadata('initial_balance', str(current_initial + amount))
  797. logger.info(f"💰 Recorded deposit: {formatted_amount_str}. New effective initial balance: {formatter.format_price_with_symbol(current_initial + amount)}")
  798. def record_withdrawal(self, amount: float, timestamp: Optional[str] = None,
  799. withdrawal_id: Optional[str] = None, description: Optional[str] = None):
  800. """Record a withdrawal."""
  801. ts = timestamp if timestamp else datetime.now(timezone.utc).isoformat()
  802. formatter = get_formatter()
  803. formatted_amount_str = formatter.format_price_with_symbol(amount)
  804. desc = description if description else f'Withdrawal of {formatted_amount_str}'
  805. self._execute_query(
  806. "INSERT INTO balance_adjustments (adjustment_id, timestamp, type, amount, description) VALUES (?, ?, ?, ?, ?)",
  807. (withdrawal_id or str(uuid.uuid4()), ts, 'withdrawal', amount, desc) # Ensured uuid is string
  808. )
  809. current_initial = float(self._get_metadata('initial_balance') or '0.0')
  810. self._set_metadata('initial_balance', str(current_initial - amount))
  811. logger.info(f"💸 Recorded withdrawal: {formatted_amount_str}. New effective initial balance: {formatter.format_price_with_symbol(current_initial - amount)}")
  812. def get_balance_adjustments_summary(self) -> Dict[str, Any]:
  813. """Get summary of all balance adjustments from DB."""
  814. adjustments = self._fetch_query("SELECT type, amount, timestamp FROM balance_adjustments ORDER BY timestamp ASC")
  815. if not adjustments:
  816. return {'total_deposits': 0.0, 'total_withdrawals': 0.0, 'net_adjustment': 0.0,
  817. 'adjustment_count': 0, 'last_adjustment': None}
  818. total_deposits = sum(adj['amount'] for adj in adjustments if adj['type'] == 'deposit')
  819. total_withdrawals = sum(adj['amount'] for adj in adjustments if adj['type'] == 'withdrawal') # Amounts stored positive
  820. net_adjustment = total_deposits - total_withdrawals
  821. return {
  822. 'total_deposits': total_deposits, 'total_withdrawals': total_withdrawals,
  823. 'net_adjustment': net_adjustment, 'adjustment_count': len(adjustments),
  824. 'last_adjustment': adjustments[-1]['timestamp'] if adjustments else None
  825. }
  826. def close_connection(self):
  827. """Close the SQLite database connection."""
  828. if self.conn:
  829. self.conn.close()
  830. logger.info("TradingStats SQLite connection closed.")
def __del__(self):
    """Ensure connection is closed when object is deleted.

    NOTE(review): __del__ timing is interpreter-dependent; an explicit
    close_connection() call remains the reliable shutdown path.
    """
    self.close_connection()
  834. # --- Order Table Management ---
  835. def record_order_placed(self, symbol: str, side: str, order_type: str,
  836. amount_requested: float, price: Optional[float] = None,
  837. bot_order_ref_id: Optional[str] = None,
  838. exchange_order_id: Optional[str] = None,
  839. status: str = 'open',
  840. parent_bot_order_ref_id: Optional[str] = None) -> Optional[int]:
  841. """Record a newly placed order in the 'orders' table. Returns the ID of the inserted order or None on failure."""
  842. now_iso = datetime.now(timezone.utc).isoformat()
  843. query = """
  844. INSERT INTO orders (bot_order_ref_id, exchange_order_id, symbol, side, type,
  845. amount_requested, price, status, timestamp_created, timestamp_updated, parent_bot_order_ref_id)
  846. VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
  847. """
  848. params = (bot_order_ref_id, exchange_order_id, symbol, side.lower(), order_type.lower(),
  849. amount_requested, price, status.lower(), now_iso, now_iso, parent_bot_order_ref_id)
  850. try:
  851. cur = self.conn.cursor()
  852. cur.execute(query, params)
  853. self.conn.commit()
  854. order_db_id = cur.lastrowid
  855. logger.info(f"Recorded order placed: ID {order_db_id}, Symbol {symbol}, Side {side}, Type {order_type}, Amount {amount_requested}, BotRef {bot_order_ref_id}, ExchID {exchange_order_id}")
  856. return order_db_id
  857. except sqlite3.IntegrityError as e:
  858. logger.error(f"Failed to record order due to IntegrityError (likely duplicate bot_order_ref_id '{bot_order_ref_id}' or exchange_order_id '{exchange_order_id}'): {e}")
  859. return None
  860. except Exception as e:
  861. logger.error(f"Failed to record order: {e}")
  862. return None
  863. def update_order_status(self, order_db_id: Optional[int] = None, bot_order_ref_id: Optional[str] = None, exchange_order_id: Optional[str] = None,
  864. new_status: Optional[str] = None, amount_filled_increment: Optional[float] = None, set_exchange_order_id: Optional[str] = None) -> bool:
  865. """Update an existing order's status and/or amount_filled. Identify order by order_db_id, bot_order_ref_id, or exchange_order_id.
  866. Args:
  867. order_db_id: Database ID to identify the order
  868. bot_order_ref_id: Bot's internal reference ID to identify the order
  869. exchange_order_id: Exchange's order ID to identify the order
  870. new_status: New status to set
  871. amount_filled_increment: Amount to add to current filled amount
  872. set_exchange_order_id: If provided, sets/updates the exchange_order_id field in the database
  873. """
  874. if not any([order_db_id, bot_order_ref_id, exchange_order_id]):
  875. logger.error("Must provide one of order_db_id, bot_order_ref_id, or exchange_order_id to update order.")
  876. return False
  877. now_iso = datetime.now(timezone.utc).isoformat()
  878. set_clauses = []
  879. params = []
  880. if new_status:
  881. set_clauses.append("status = ?")
  882. params.append(new_status.lower())
  883. if set_exchange_order_id is not None:
  884. set_clauses.append("exchange_order_id = ?")
  885. params.append(set_exchange_order_id)
  886. current_amount_filled = 0.0
  887. identifier_clause = ""
  888. identifier_param = None
  889. if order_db_id:
  890. identifier_clause = "id = ?"
  891. identifier_param = order_db_id
  892. elif bot_order_ref_id:
  893. identifier_clause = "bot_order_ref_id = ?"
  894. identifier_param = bot_order_ref_id
  895. elif exchange_order_id:
  896. identifier_clause = "exchange_order_id = ?"
  897. identifier_param = exchange_order_id
  898. if amount_filled_increment is not None and amount_filled_increment > 0:
  899. # To correctly increment, we might need to fetch current filled amount first if DB doesn't support direct increment easily or atomically with other updates.
  900. # For simplicity here, assuming we can use SQL's increment if other fields are not changing, or we do it in two steps.
  901. # Let's assume we fetch first then update to be safe and clear.
  902. order_data = self._fetchone_query(f"SELECT amount_filled FROM orders WHERE {identifier_clause}", (identifier_param,))
  903. if order_data:
  904. current_amount_filled = order_data.get('amount_filled', 0.0)
  905. else:
  906. logger.warning(f"Order not found by {identifier_clause}={identifier_param} when trying to increment amount_filled.")
  907. # Potentially still update status if new_status is provided, but amount_filled won't be right.
  908. # For now, let's proceed with update if status is there.
  909. set_clauses.append("amount_filled = ?")
  910. params.append(current_amount_filled + amount_filled_increment)
  911. if not set_clauses:
  912. logger.info("No fields to update for order.")
  913. return True # No update needed, not an error
  914. set_clauses.append("timestamp_updated = ?")
  915. params.append(now_iso)
  916. params.append(identifier_param) # Add identifier param at the end for WHERE clause
  917. query = f"UPDATE orders SET { ', '.join(set_clauses) } WHERE {identifier_clause}"
  918. try:
  919. self._execute_query(query, tuple(params))
  920. log_msg = f"Updated order ({identifier_clause}={identifier_param}): Status to '{new_status or 'N/A'}', Filled increment {amount_filled_increment or 0.0}"
  921. if set_exchange_order_id is not None:
  922. log_msg += f", Exchange ID set to '{set_exchange_order_id}'"
  923. logger.info(log_msg)
  924. return True
  925. except Exception as e:
  926. logger.error(f"Failed to update order ({identifier_clause}={identifier_param}): {e}")
  927. return False
  928. def get_order_by_db_id(self, order_db_id: int) -> Optional[Dict[str, Any]]:
  929. """Fetch an order by its database primary key ID."""
  930. return self._fetchone_query("SELECT * FROM orders WHERE id = ?", (order_db_id,))
  931. def get_order_by_bot_ref_id(self, bot_order_ref_id: str) -> Optional[Dict[str, Any]]:
  932. """Fetch an order by the bot's internal reference ID."""
  933. return self._fetchone_query("SELECT * FROM orders WHERE bot_order_ref_id = ?", (bot_order_ref_id,))
  934. def get_order_by_exchange_id(self, exchange_order_id: str) -> Optional[Dict[str, Any]]:
  935. """Fetch an order by the exchange's order ID."""
  936. return self._fetchone_query("SELECT * FROM orders WHERE exchange_order_id = ?", (exchange_order_id,))
  937. def get_orders_by_status(self, status: str, order_type_filter: Optional[str] = None, parent_bot_order_ref_id: Optional[str] = None) -> List[Dict[str, Any]]:
  938. """Fetch all orders with a specific status, optionally filtering by order_type and parent_bot_order_ref_id."""
  939. query = "SELECT * FROM orders WHERE status = ?"
  940. params = [status.lower()]
  941. if order_type_filter:
  942. query += " AND type = ?"
  943. params.append(order_type_filter.lower())
  944. if parent_bot_order_ref_id:
  945. query += " AND parent_bot_order_ref_id = ?"
  946. params.append(parent_bot_order_ref_id)
  947. query += " ORDER BY timestamp_created ASC"
  948. return self._fetch_query(query, tuple(params))
  949. def cancel_linked_orders(self, parent_bot_order_ref_id: str, new_status: str = 'cancelled_parent_filled') -> int:
  950. """Cancel all orders linked to a parent order (e.g., pending stop losses when parent order fills or gets cancelled).
  951. Returns the number of orders that were cancelled."""
  952. linked_orders = self.get_orders_by_status('pending_trigger', parent_bot_order_ref_id=parent_bot_order_ref_id)
  953. cancelled_count = 0
  954. for order in linked_orders:
  955. order_db_id = order.get('id')
  956. if order_db_id:
  957. success = self.update_order_status(order_db_id=order_db_id, new_status=new_status)
  958. if success:
  959. cancelled_count += 1
  960. logger.info(f"Cancelled linked order ID {order_db_id} (parent: {parent_bot_order_ref_id}) -> status: {new_status}")
  961. return cancelled_count
  962. def cancel_pending_stop_losses_by_symbol(self, symbol: str, new_status: str = 'cancelled_position_closed') -> int:
  963. """Cancel all pending stop loss orders for a specific symbol (when position is closed).
  964. Returns the number of stop loss orders that were cancelled."""
  965. query = "SELECT * FROM orders WHERE symbol = ? AND status = 'pending_trigger' AND type = 'stop_limit_trigger'"
  966. pending_stop_losses = self._fetch_query(query, (symbol,))
  967. cancelled_count = 0
  968. for order in pending_stop_losses:
  969. order_db_id = order.get('id')
  970. if order_db_id:
  971. success = self.update_order_status(order_db_id=order_db_id, new_status=new_status)
  972. if success:
  973. cancelled_count += 1
  974. logger.info(f"Cancelled pending SL order ID {order_db_id} for {symbol} -> status: {new_status}")
  975. return cancelled_count
  976. def get_order_cleanup_summary(self) -> Dict[str, Any]:
  977. """Get summary of order cleanup actions for monitoring and debugging."""
  978. try:
  979. # Get counts of different cancellation types
  980. cleanup_stats = {}
  981. cancellation_types = [
  982. 'cancelled_parent_cancelled',
  983. 'cancelled_parent_disappeared',
  984. 'cancelled_manual_exit',
  985. 'cancelled_auto_exit',
  986. 'cancelled_no_position',
  987. 'cancelled_external_position_close',
  988. 'cancelled_orphaned_no_position',
  989. 'cancelled_externally',
  990. 'immediately_executed_on_activation',
  991. 'activation_execution_failed',
  992. 'activation_execution_error'
  993. ]
  994. for cancel_type in cancellation_types:
  995. count_result = self._fetchone_query(
  996. "SELECT COUNT(*) as count FROM orders WHERE status = ?",
  997. (cancel_type,)
  998. )
  999. cleanup_stats[cancel_type] = count_result['count'] if count_result else 0
  1000. # Get currently pending stop losses
  1001. pending_sls = self.get_orders_by_status('pending_trigger', 'stop_limit_trigger')
  1002. cleanup_stats['currently_pending_stop_losses'] = len(pending_sls)
  1003. # Get total orders in various states
  1004. active_orders = self._fetchone_query(
  1005. "SELECT COUNT(*) as count FROM orders WHERE status IN ('open', 'submitted', 'partially_filled')",
  1006. ()
  1007. )
  1008. cleanup_stats['currently_active_orders'] = active_orders['count'] if active_orders else 0
  1009. return cleanup_stats
  1010. except Exception as e:
  1011. logger.error(f"Error getting order cleanup summary: {e}")
  1012. return {}
  1013. def get_external_activity_summary(self, days: int = 7) -> Dict[str, Any]:
  1014. """Get summary of external activity (trades and cancellations) over the last N days."""
  1015. try:
  1016. from datetime import timedelta
  1017. cutoff_date = (datetime.now(timezone.utc) - timedelta(days=days)).isoformat()
  1018. # External trades
  1019. external_trades = self._fetch_query(
  1020. "SELECT COUNT(*) as count, side FROM trades WHERE trade_type = 'external' AND timestamp >= ? GROUP BY side",
  1021. (cutoff_date,)
  1022. )
  1023. external_trade_summary = {
  1024. 'external_buy_trades': 0,
  1025. 'external_sell_trades': 0,
  1026. 'total_external_trades': 0
  1027. }
  1028. for trade_group in external_trades:
  1029. side = trade_group['side']
  1030. count = trade_group['count']
  1031. external_trade_summary['total_external_trades'] += count
  1032. if side == 'buy':
  1033. external_trade_summary['external_buy_trades'] = count
  1034. elif side == 'sell':
  1035. external_trade_summary['external_sell_trades'] = count
  1036. # External cancellations
  1037. external_cancellations = self._fetchone_query(
  1038. "SELECT COUNT(*) as count FROM orders WHERE status = 'cancelled_externally' AND timestamp_updated >= ?",
  1039. (cutoff_date,)
  1040. )
  1041. external_trade_summary['external_cancellations'] = external_cancellations['count'] if external_cancellations else 0
  1042. # Cleanup actions
  1043. cleanup_cancellations = self._fetchone_query(
  1044. """SELECT COUNT(*) as count FROM orders
  1045. WHERE status LIKE 'cancelled_%'
  1046. AND status != 'cancelled_externally'
  1047. AND timestamp_updated >= ?""",
  1048. (cutoff_date,)
  1049. )
  1050. external_trade_summary['cleanup_cancellations'] = cleanup_cancellations['count'] if cleanup_cancellations else 0
  1051. external_trade_summary['period_days'] = days
  1052. return external_trade_summary
  1053. except Exception as e:
  1054. logger.error(f"Error getting external activity summary: {e}")
  1055. return {'period_days': days, 'total_external_trades': 0, 'external_cancellations': 0}
  1056. # --- End Order Table Management ---
  1057. # =============================================================================
  1058. # TRADE LIFECYCLE MANAGEMENT - PHASE 4: UNIFIED TRADES TABLE
  1059. # =============================================================================
  1060. def create_trade_lifecycle(self, symbol: str, side: str, entry_order_id: Optional[str] = None,
  1061. stop_loss_price: Optional[float] = None, take_profit_price: Optional[float] = None,
  1062. trade_type: str = 'manual') -> Optional[str]:
  1063. """Create a new trade lifecycle when an entry order is placed."""
  1064. try:
  1065. lifecycle_id = str(uuid.uuid4())
  1066. query = """
  1067. INSERT INTO trades (
  1068. symbol, side, amount, price, value, trade_type, timestamp,
  1069. status, trade_lifecycle_id, position_side, entry_order_id,
  1070. stop_loss_price, take_profit_price, updated_at
  1071. ) VALUES (?, ?, 0, 0, 0, ?, ?, 'pending', ?, 'flat', ?, ?, ?, ?)
  1072. """
  1073. timestamp = datetime.now(timezone.utc).isoformat()
  1074. params = (symbol, side.lower(), trade_type, timestamp, lifecycle_id,
  1075. entry_order_id, stop_loss_price, take_profit_price, timestamp)
  1076. self._execute_query(query, params)
  1077. logger.info(f"📊 Created trade lifecycle {lifecycle_id}: {side.upper()} {symbol} (pending)")
  1078. return lifecycle_id
  1079. except Exception as e:
  1080. logger.error(f"❌ Error creating trade lifecycle: {e}")
  1081. return None
  1082. def update_trade_position_opened(self, lifecycle_id: str, entry_price: float,
  1083. entry_amount: float, exchange_fill_id: str) -> bool:
  1084. """Update trade when position is opened (entry order filled)."""
  1085. try:
  1086. query = """
  1087. UPDATE trades
  1088. SET status = 'position_opened',
  1089. amount = ?,
  1090. price = ?,
  1091. value = ?,
  1092. entry_price = ?,
  1093. current_position_size = ?,
  1094. position_side = CASE
  1095. WHEN side = 'buy' THEN 'long'
  1096. WHEN side = 'sell' THEN 'short'
  1097. ELSE position_side
  1098. END,
  1099. exchange_fill_id = ?,
  1100. position_opened_at = ?,
  1101. updated_at = ?
  1102. WHERE trade_lifecycle_id = ? AND status = 'pending'
  1103. """
  1104. timestamp = datetime.now(timezone.utc).isoformat()
  1105. value = entry_amount * entry_price
  1106. params = (entry_amount, entry_price, value, entry_price, entry_amount,
  1107. exchange_fill_id, timestamp, timestamp, lifecycle_id)
  1108. self._execute_query(query, params)
  1109. formatter = get_formatter()
  1110. trade_info = self.get_trade_by_lifecycle_id(lifecycle_id) # Fetch to get symbol for formatting
  1111. symbol_for_formatting = trade_info.get('symbol', 'UNKNOWN_SYMBOL') if trade_info else 'UNKNOWN_SYMBOL'
  1112. base_asset_for_amount = symbol_for_formatting.split('/')[0] if '/' in symbol_for_formatting else symbol_for_formatting
  1113. logger.info(f"📈 Trade lifecycle {lifecycle_id} position opened: {formatter.format_amount(entry_amount, base_asset_for_amount)} {symbol_for_formatting} @ {formatter.format_price(entry_price, symbol_for_formatting)}")
  1114. return True
  1115. except Exception as e:
  1116. logger.error(f"❌ Error updating trade position opened: {e}")
  1117. return False
  1118. def update_trade_position_closed(self, lifecycle_id: str, exit_price: float,
  1119. realized_pnl: float, exchange_fill_id: str) -> bool:
  1120. """Update trade when position is fully closed."""
  1121. try:
  1122. query = """
  1123. UPDATE trades
  1124. SET status = 'position_closed',
  1125. current_position_size = 0,
  1126. position_side = 'flat',
  1127. realized_pnl = ?,
  1128. position_closed_at = ?,
  1129. updated_at = ?
  1130. WHERE trade_lifecycle_id = ? AND status = 'position_opened'
  1131. """
  1132. timestamp = datetime.now(timezone.utc).isoformat()
  1133. params = (realized_pnl, timestamp, timestamp, lifecycle_id)
  1134. self._execute_query(query, params)
  1135. formatter = get_formatter()
  1136. trade_info = self.get_trade_by_lifecycle_id(lifecycle_id) # Fetch to get symbol for P&L formatting context
  1137. symbol_for_formatting = trade_info.get('symbol', 'USD') # Default to USD for PNL if symbol unknown
  1138. pnl_emoji = "🟢" if realized_pnl >= 0 else "🔴"
  1139. logger.info(f"{pnl_emoji} Trade lifecycle {lifecycle_id} position closed: P&L {formatter.format_price_with_symbol(realized_pnl)}")
  1140. return True
  1141. except Exception as e:
  1142. logger.error(f"❌ Error updating trade position closed: {e}")
  1143. return False
  1144. def update_trade_cancelled(self, lifecycle_id: str, reason: str = "order_cancelled") -> bool:
  1145. """Update trade when entry order is cancelled (never opened)."""
  1146. try:
  1147. query = """
  1148. UPDATE trades
  1149. SET status = 'cancelled',
  1150. notes = ?,
  1151. updated_at = ?
  1152. WHERE trade_lifecycle_id = ? AND status = 'pending'
  1153. """
  1154. timestamp = datetime.now(timezone.utc).isoformat()
  1155. params = (f"Cancelled: {reason}", timestamp, lifecycle_id)
  1156. self._execute_query(query, params)
  1157. logger.info(f"❌ Trade lifecycle {lifecycle_id} cancelled: {reason}")
  1158. return True
  1159. except Exception as e:
  1160. logger.error(f"❌ Error updating trade cancelled: {e}")
  1161. return False
  1162. def link_stop_loss_to_trade(self, lifecycle_id: str, stop_loss_order_id: str,
  1163. stop_loss_price: float) -> bool:
  1164. """Link a stop loss order to a trade lifecycle."""
  1165. try:
  1166. query = """
  1167. UPDATE trades
  1168. SET stop_loss_order_id = ?,
  1169. stop_loss_price = ?,
  1170. updated_at = ?
  1171. WHERE trade_lifecycle_id = ? AND status = 'position_opened'
  1172. """
  1173. timestamp = datetime.now(timezone.utc).isoformat()
  1174. params = (stop_loss_order_id, stop_loss_price, timestamp, lifecycle_id)
  1175. self._execute_query(query, params)
  1176. formatter = get_formatter()
  1177. trade_info = self.get_trade_by_lifecycle_id(lifecycle_id) # Fetch to get symbol for formatting
  1178. symbol_for_formatting = trade_info.get('symbol', 'UNKNOWN_SYMBOL') if trade_info else 'UNKNOWN_SYMBOL'
  1179. logger.info(f"🛑 Linked stop loss order {stop_loss_order_id} ({formatter.format_price(stop_loss_price, symbol_for_formatting)}) to trade {lifecycle_id}")
  1180. return True
  1181. except Exception as e:
  1182. logger.error(f"❌ Error linking stop loss to trade: {e}")
  1183. return False
  1184. def link_take_profit_to_trade(self, lifecycle_id: str, take_profit_order_id: str,
  1185. take_profit_price: float) -> bool:
  1186. """Link a take profit order to a trade lifecycle."""
  1187. try:
  1188. query = """
  1189. UPDATE trades
  1190. SET take_profit_order_id = ?,
  1191. take_profit_price = ?,
  1192. updated_at = ?
  1193. WHERE trade_lifecycle_id = ? AND status = 'position_opened'
  1194. """
  1195. timestamp = datetime.now(timezone.utc).isoformat()
  1196. params = (take_profit_order_id, take_profit_price, timestamp, lifecycle_id)
  1197. self._execute_query(query, params)
  1198. formatter = get_formatter()
  1199. trade_info = self.get_trade_by_lifecycle_id(lifecycle_id) # Fetch to get symbol for formatting
  1200. symbol_for_formatting = trade_info.get('symbol', 'UNKNOWN_SYMBOL') if trade_info else 'UNKNOWN_SYMBOL'
  1201. logger.info(f"🎯 Linked take profit order {take_profit_order_id} ({formatter.format_price(take_profit_price, symbol_for_formatting)}) to trade {lifecycle_id}")
  1202. return True
  1203. except Exception as e:
  1204. logger.error(f"❌ Error linking take profit to trade: {e}")
  1205. return False
  1206. def get_trade_by_lifecycle_id(self, lifecycle_id: str) -> Optional[Dict[str, Any]]:
  1207. """Get trade by lifecycle ID."""
  1208. query = "SELECT * FROM trades WHERE trade_lifecycle_id = ?"
  1209. return self._fetchone_query(query, (lifecycle_id,))
  1210. def get_trade_by_symbol_and_status(self, symbol: str, status: str = 'position_opened') -> Optional[Dict[str, Any]]:
  1211. """Get trade by symbol and status."""
  1212. query = "SELECT * FROM trades WHERE symbol = ? AND status = ? ORDER BY updated_at DESC LIMIT 1"
  1213. return self._fetchone_query(query, (symbol, status))
  1214. def get_open_positions(self, symbol: Optional[str] = None) -> List[Dict[str, Any]]:
  1215. """Get all open positions, optionally filtered by symbol."""
  1216. if symbol:
  1217. query = "SELECT * FROM trades WHERE status = 'position_opened' AND symbol = ? ORDER BY position_opened_at DESC"
  1218. return self._fetch_query(query, (symbol,))
  1219. else:
  1220. query = "SELECT * FROM trades WHERE status = 'position_opened' ORDER BY position_opened_at DESC"
  1221. return self._fetch_query(query)
  1222. def get_trades_by_status(self, status: str, limit: int = 50) -> List[Dict[str, Any]]:
  1223. """Get trades by status."""
  1224. query = "SELECT * FROM trades WHERE status = ? ORDER BY updated_at DESC LIMIT ?"
  1225. return self._fetch_query(query, (status, limit))
  1226. def get_lifecycle_by_entry_order_id(self, entry_exchange_order_id: str, status: Optional[str] = None) -> Optional[Dict[str, Any]]:
  1227. """Get a trade lifecycle by its entry_order_id (exchange ID) and optionally by status."""
  1228. if status:
  1229. query = "SELECT * FROM trades WHERE entry_order_id = ? AND status = ? LIMIT 1"
  1230. params = (entry_exchange_order_id, status)
  1231. else:
  1232. query = "SELECT * FROM trades WHERE entry_order_id = ? LIMIT 1"
  1233. params = (entry_exchange_order_id,)
  1234. return self._fetchone_query(query, params)
  1235. def get_lifecycle_by_sl_order_id(self, sl_exchange_order_id: str, status: str = 'position_opened') -> Optional[Dict[str, Any]]:
  1236. """Get an active trade lifecycle by its stop_loss_order_id (exchange ID)."""
  1237. query = "SELECT * FROM trades WHERE stop_loss_order_id = ? AND status = ? LIMIT 1"
  1238. return self._fetchone_query(query, (sl_exchange_order_id, status))
  1239. def get_lifecycle_by_tp_order_id(self, tp_exchange_order_id: str, status: str = 'position_opened') -> Optional[Dict[str, Any]]:
  1240. """Get an active trade lifecycle by its take_profit_order_id (exchange ID)."""
  1241. query = "SELECT * FROM trades WHERE take_profit_order_id = ? AND status = ? LIMIT 1"
  1242. return self._fetchone_query(query, (tp_exchange_order_id, status))
  1243. def get_pending_stop_loss_activations(self) -> List[Dict[str, Any]]:
  1244. """Get open positions that need stop loss activation."""
  1245. query = """
  1246. SELECT * FROM trades
  1247. WHERE status = 'position_opened'
  1248. AND stop_loss_price IS NOT NULL
  1249. AND stop_loss_order_id IS NULL
  1250. ORDER BY updated_at ASC
  1251. """
  1252. return self._fetch_query(query)
  1253. def cleanup_old_cancelled_trades(self, days_old: int = 7) -> int:
  1254. """Clean up old cancelled trades (optional - for housekeeping)."""
  1255. try:
  1256. cutoff_date = (datetime.now(timezone.utc) - timedelta(days=days_old)).isoformat()
  1257. # Count before deletion
  1258. count_query = """
  1259. SELECT COUNT(*) as count FROM trades
  1260. WHERE status = 'cancelled' AND updated_at < ?
  1261. """
  1262. count_result = self._fetchone_query(count_query, (cutoff_date,))
  1263. count_to_delete = count_result['count'] if count_result else 0
  1264. if count_to_delete > 0:
  1265. delete_query = """
  1266. DELETE FROM trades
  1267. WHERE status = 'cancelled' AND updated_at < ?
  1268. """
  1269. self._execute_query(delete_query, (cutoff_date,))
  1270. logger.info(f"🧹 Cleaned up {count_to_delete} old cancelled trades (older than {days_old} days)")
  1271. return count_to_delete
  1272. except Exception as e:
  1273. logger.error(f"❌ Error cleaning up old cancelled trades: {e}")
  1274. return 0
  1275. def confirm_position_with_exchange(self, symbol: str, exchange_position_size: float,
  1276. exchange_open_orders: List[Dict]) -> bool:
  1277. """🆕 PHASE 4: Confirm position status with exchange before updating status."""
  1278. try:
  1279. # Get current trade status
  1280. current_trade = self.get_trade_by_symbol_and_status(symbol, 'position_opened')
  1281. if not current_trade:
  1282. return True # No open position to confirm
  1283. lifecycle_id = current_trade['trade_lifecycle_id']
  1284. has_open_orders = len([o for o in exchange_open_orders if o.get('symbol') == symbol]) > 0
  1285. # Only close position if exchange confirms no position AND no pending orders
  1286. if abs(exchange_position_size) < 1e-8 and not has_open_orders:
  1287. # Calculate realized P&L based on position side
  1288. position_side = current_trade['position_side']
  1289. entry_price_db = current_trade['entry_price'] # entry_price from db
  1290. # current_amount = current_trade['current_position_size'] # Not directly used for PNL calc here
  1291. # For a closed position, we need to calculate final P&L
  1292. # This would typically come from the closing trade, but for confirmation we estimate
  1293. estimated_pnl = current_trade.get('realized_pnl', 0) # Use existing realized_pnl if any
  1294. success = self.update_trade_position_closed(
  1295. lifecycle_id,
  1296. entry_price_db, # Using entry price from DB as estimate since position is confirmed closed
  1297. estimated_pnl,
  1298. "exchange_confirmed_closed"
  1299. )
  1300. if success:
  1301. logger.info(f"✅ Confirmed position closed for {symbol} with exchange")
  1302. return success
  1303. return True # Position still exists on exchange, no update needed
  1304. except Exception as e:
  1305. logger.error(f"❌ Error confirming position with exchange: {e}")
  1306. return False
  1307. def update_trade_market_data(self,
  1308. trade_lifecycle_id: str,
  1309. unrealized_pnl: Optional[float] = None,
  1310. mark_price: Optional[float] = None,
  1311. current_position_size: Optional[float] = None,
  1312. entry_price: Optional[float] = None,
  1313. liquidation_price: Optional[float] = None,
  1314. margin_used: Optional[float] = None,
  1315. leverage: Optional[float] = None,
  1316. position_value: Optional[float] = None,
  1317. unrealized_pnl_percentage: Optional[float] = None) -> bool:
  1318. """Update market-related data for an open trade lifecycle.
  1319. Only updates fields for which a non-None value is provided.
  1320. """
  1321. try:
  1322. updates = []
  1323. params = []
  1324. if unrealized_pnl is not None:
  1325. updates.append("unrealized_pnl = ?")
  1326. params.append(unrealized_pnl)
  1327. if mark_price is not None:
  1328. updates.append("mark_price = ?")
  1329. params.append(mark_price)
  1330. if current_position_size is not None:
  1331. updates.append("current_position_size = ?")
  1332. params.append(current_position_size)
  1333. if entry_price is not None: # If exchange provides updated avg entry
  1334. updates.append("entry_price = ?")
  1335. params.append(entry_price)
  1336. if liquidation_price is not None:
  1337. updates.append("liquidation_price = ?")
  1338. params.append(liquidation_price)
  1339. if margin_used is not None:
  1340. updates.append("margin_used = ?")
  1341. params.append(margin_used)
  1342. if leverage is not None:
  1343. updates.append("leverage = ?")
  1344. params.append(leverage)
  1345. if position_value is not None:
  1346. updates.append("position_value = ?")
  1347. params.append(position_value)
  1348. if unrealized_pnl_percentage is not None:
  1349. updates.append("unrealized_pnl_percentage = ?")
  1350. params.append(unrealized_pnl_percentage)
  1351. if not updates:
  1352. logger.debug(f"No market data fields provided to update for lifecycle {trade_lifecycle_id}.")
  1353. return True # No update needed, not an error
  1354. timestamp = datetime.now(timezone.utc).isoformat()
  1355. updates.append("updated_at = ?")
  1356. params.append(timestamp)
  1357. set_clause = ", ".join(updates)
  1358. query = f"""
  1359. UPDATE trades
  1360. SET {set_clause}
  1361. WHERE trade_lifecycle_id = ? AND status = 'position_opened'
  1362. """
  1363. params.append(trade_lifecycle_id)
  1364. # Use the class's own connection self.conn
  1365. cursor = self.conn.cursor()
  1366. cursor.execute(query, tuple(params))
  1367. self.conn.commit()
  1368. updated_rows = cursor.rowcount
  1369. if updated_rows > 0:
  1370. logger.debug(f"💹 Updated market data for lifecycle {trade_lifecycle_id}. Fields: {updates}")
  1371. return True
  1372. else:
  1373. # This might happen if the lifecycle ID doesn't exist or status is not 'position_opened'
  1374. # logger.warning(f"⚠️ No trade found or not in 'position_opened' state for lifecycle {trade_lifecycle_id} to update market data.")
  1375. return False # Not necessarily an error
  1376. except Exception as e:
  1377. logger.error(f"❌ Error updating market data for trade lifecycle {trade_lifecycle_id}: {e}")
  1378. return False
  1379. # --- End Trade Lifecycle Management ---
  1380. def get_daily_balance_record_count(self) -> int:
  1381. """Get the total number of daily balance records."""
  1382. row = self._fetchone_query("SELECT COUNT(*) as count FROM daily_balances")
  1383. return row['count'] if row and 'count' in row else 0
  1384. # 🆕 PHASE 5: AGGREGATION AND PURGING LOGIC
def _migrate_trade_to_aggregated_stats(self, trade_lifecycle_id: str):
    """Migrate a completed/cancelled trade's stats to aggregate tables and delete the original trade.

    For a 'position_closed' lifecycle, upserts the realized P&L and
    entry/exit volumes into both token_stats (lifetime per-token totals)
    and daily_aggregated_stats (per-day, per-token totals). For a
    'cancelled' lifecycle, only bumps the cancelled-cycle counter in
    token_stats. In both cases the source row is then deleted from the
    trades table. All writes run inside one transaction via `with self.conn`.
    Lifecycles in any other status are deleted without aggregation.
    """
    trade_data = self.get_trade_by_lifecycle_id(trade_lifecycle_id)
    if not trade_data:
        logger.error(f"Cannot migrate trade {trade_lifecycle_id}: Not found.")
        return
    status = trade_data.get('status')
    symbol = trade_data.get('symbol')
    # Aggregate tables are keyed by base token (e.g. 'BTC' from 'BTC/USDT');
    # a symbol without '/' is used as the token verbatim.
    token = symbol.split('/')[0] if symbol and '/' in symbol else symbol # Assuming symbol like BTC/USDT
    if not token:
        logger.error(f"Cannot migrate trade {trade_lifecycle_id}: Token could not be derived from symbol '{symbol}'.")
        return
    now_iso = datetime.now(timezone.utc).isoformat()
    try:
        with self.conn: # Ensures atomicity for the operations below
            if status == 'position_closed':
                realized_pnl = trade_data.get('realized_pnl', 0.0)
                # Use entry value if available, otherwise value (amount * price at entry)
                entry_value = trade_data.get('value', 0.0) # 'value' is amount * price from initial trade record
                # For exit_value, we'd ideally have the value of the closing trade(s).
                # If the 'realized_pnl' is from the trade record, and 'entry_value' is entry, exit_value = entry_value + realized_pnl
                exit_value = entry_value + realized_pnl
                # Daily bucket is derived from when the position closed
                # (falling back to "now" if the timestamp is missing).
                closed_at_str = trade_data.get('position_closed_at', now_iso)
                closed_at_dt = datetime.fromisoformat(closed_at_str)
                date_str = closed_at_dt.strftime('%Y-%m-%d')
                # Update token_stats: insert a fresh one-cycle row, or fold this
                # cycle into the existing per-token aggregates on conflict.
                token_upsert_query = """
                    INSERT INTO token_stats (
                        token, total_realized_pnl, total_completed_cycles, winning_cycles, losing_cycles,
                        total_entry_volume, total_exit_volume, sum_of_winning_pnl, sum_of_losing_pnl,
                        largest_winning_cycle_pnl, largest_losing_cycle_pnl,
                        first_cycle_closed_at, last_cycle_closed_at, updated_at
                    ) VALUES (?, ?, 1, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
                    ON CONFLICT(token) DO UPDATE SET
                        total_realized_pnl = total_realized_pnl + excluded.total_realized_pnl,
                        total_completed_cycles = total_completed_cycles + 1,
                        winning_cycles = winning_cycles + excluded.winning_cycles,
                        losing_cycles = losing_cycles + excluded.losing_cycles,
                        total_entry_volume = total_entry_volume + excluded.total_entry_volume,
                        total_exit_volume = total_exit_volume + excluded.total_exit_volume,
                        sum_of_winning_pnl = sum_of_winning_pnl + excluded.sum_of_winning_pnl,
                        sum_of_losing_pnl = sum_of_losing_pnl + excluded.sum_of_losing_pnl,
                        largest_winning_cycle_pnl = MAX(largest_winning_cycle_pnl, excluded.largest_winning_cycle_pnl),
                        largest_losing_cycle_pnl = MAX(largest_losing_cycle_pnl, excluded.largest_losing_cycle_pnl),
                        first_cycle_closed_at = MIN(first_cycle_closed_at, excluded.first_cycle_closed_at),
                        last_cycle_closed_at = MAX(last_cycle_closed_at, excluded.last_cycle_closed_at),
                        updated_at = excluded.updated_at
                """
                # PnL of exactly zero counts as neither a win nor a loss;
                # losing PnL is accumulated as a positive magnitude.
                is_win = 1 if realized_pnl > 0 else 0
                is_loss = 1 if realized_pnl < 0 else 0
                win_pnl_contrib = realized_pnl if realized_pnl > 0 else 0.0
                loss_pnl_contrib = abs(realized_pnl) if realized_pnl < 0 else 0.0
                self._execute_query(token_upsert_query, (
                    token, realized_pnl, is_win, is_loss, entry_value, exit_value,
                    win_pnl_contrib, loss_pnl_contrib, win_pnl_contrib, loss_pnl_contrib,
                    closed_at_str, closed_at_str, now_iso
                ))
                # Update daily_aggregated_stats
                daily_upsert_query = """
                    INSERT INTO daily_aggregated_stats (
                        date, token, realized_pnl, completed_cycles, entry_volume, exit_volume
                    ) VALUES (?, ?, ?, 1, ?, ?)
                    ON CONFLICT(date, token) DO UPDATE SET
                        realized_pnl = realized_pnl + excluded.realized_pnl,
                        completed_cycles = completed_cycles + 1,
                        entry_volume = entry_volume + excluded.entry_volume,
                        exit_volume = exit_volume + excluded.exit_volume
                """
                self._execute_query(daily_upsert_query, (
                    date_str, token, realized_pnl, entry_value, exit_value
                ))
                logger.info(f"Aggregated stats for closed trade lifecycle {trade_lifecycle_id} ({token}). PNL: {realized_pnl:.2f}")
            elif status == 'cancelled':
                # Update token_stats for cancelled count
                cancelled_upsert_query = """
                    INSERT INTO token_stats (token, total_cancelled_cycles, updated_at)
                    VALUES (?, 1, ?)
                    ON CONFLICT(token) DO UPDATE SET
                        total_cancelled_cycles = total_cancelled_cycles + 1,
                        updated_at = excluded.updated_at
                """
                self._execute_query(cancelled_upsert_query, (token, now_iso))
                logger.info(f"Incremented cancelled_cycles for {token} due to lifecycle {trade_lifecycle_id}.")
            # Delete the original trade from the 'trades' table
            self._execute_query("DELETE FROM trades WHERE trade_lifecycle_id = ?", (trade_lifecycle_id,))
            logger.info(f"Deleted trade lifecycle {trade_lifecycle_id} from trades table after aggregation.")
    except sqlite3.Error as e:
        logger.error(f"Database error migrating trade {trade_lifecycle_id} to aggregate stats: {e}", exc_info=True)
    except Exception as e:
        logger.error(f"Unexpected error migrating trade {trade_lifecycle_id} to aggregate stats: {e}", exc_info=True)
  1475. def purge_old_daily_aggregated_stats(self, months_to_keep: int = 10):
  1476. """Purge records from daily_aggregated_stats older than a specified number of months."""
  1477. if months_to_keep <= 0:
  1478. logger.info("Not purging daily_aggregated_stats as months_to_keep is not positive.")
  1479. return
  1480. try:
  1481. # Calculate the cutoff date
  1482. # This is a bit simplified; for more precise month calculations, dateutil.relativedelta might be used
  1483. # For SQLite, comparing YYYY-MM-DD strings works well.
  1484. cutoff_date = datetime.now(timezone.utc).date() - timedelta(days=months_to_keep * 30) # Approximate
  1485. cutoff_date_str = cutoff_date.strftime('%Y-%m-%d')
  1486. query = "DELETE FROM daily_aggregated_stats WHERE date < ?"
  1487. # To count before deleting (optional, for logging)
  1488. # count_query = "SELECT COUNT(*) as count FROM daily_aggregated_stats WHERE date < ?"
  1489. # before_count_row = self._fetchone_query(count_query, (cutoff_date_str,))
  1490. # num_to_delete = before_count_row['count'] if before_count_row else 0
  1491. with self.conn:
  1492. cursor = self.conn.cursor()
  1493. cursor.execute(query, (cutoff_date_str,))
  1494. rows_deleted = cursor.rowcount
  1495. if rows_deleted > 0:
  1496. logger.info(f"Purged {rows_deleted} old records from daily_aggregated_stats (older than approx. {months_to_keep} months, before {cutoff_date_str}).")
  1497. else:
  1498. logger.info(f"No old records found in daily_aggregated_stats to purge (older than approx. {months_to_keep} months, before {cutoff_date_str}).")
  1499. except sqlite3.Error as e:
  1500. logger.error(f"Database error purging old daily_aggregated_stats: {e}", exc_info=True)
  1501. except Exception as e:
  1502. logger.error(f"Unexpected error purging old daily_aggregated_stats: {e}", exc_info=True)