trading_stats.py 80 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
7127812791280128112821283128412851286128712881289129012911292129312941295129612971298129913001301130213031304130513061307130813091310131113121313131413151316131713181319132013211322132313241325132613271328132913301331133213331334133513361337133813391340134113421343134413451346134713481349135013511352135313541355135613571358135913601361136213631364136513661367136813691370137113721373137413751376137713781379138013811382138313841385138613871388138913901391139213931394139513961397139813991400140114021403140414051406140714081409141014111412141314141415141614171418141914201421142214231424142514261427142814291430143114321433143414351436143714381439144014411442144314441445144614471448144914501451145214531454145514561457145814591460146114621463146414651466146714681469147014711472147314741475147614771478147914801481148214831484148514861487148814891490149114921493149414951496149714981499150015011502150315041505150615071508150915101511151215131514151515161517151815191520152115221523152415251526152715281529153015311532153315341535153615371538153915401541154215431544154515461547154815491550155115521553155415551556155715581559156015611562156315641565156615671568156915701571157215731574157515761577157815791580158115821583158415851586158715881589159015911592159315941595159615971598159916001601160216031604160516061607160816091610161116121613161416151616161716181619162016211622
  1. #!/usr/bin/env python3
  2. """
  3. Trading Statistics Tracker (SQLite Version)
  4. Tracks and calculates comprehensive trading statistics using an SQLite database.
  5. """
import logging
import math
import os
import sqlite3
import uuid
from collections import defaultdict
from datetime import datetime, timedelta, timezone
from typing import Dict, List, Any, Optional, Tuple, Union

import numpy as np

# 🆕 Import the migration runner
from src.migrations.migrate_db import run_migrations as run_db_migrations
  16. logger = logging.getLogger(__name__)
  17. def _normalize_token_case(token: str) -> str:
  18. """
  19. Normalize token case: if any characters are already uppercase, keep as-is.
  20. Otherwise, convert to uppercase. This handles mixed-case tokens like kPEPE, kBONK.
  21. """
  22. # Check if any character is already uppercase
  23. if any(c.isupper() for c in token):
  24. return token # Keep original case for mixed-case tokens
  25. else:
  26. return token.upper() # Convert to uppercase for all-lowercase input
  27. class TradingStats:
  28. """Comprehensive trading statistics tracker using SQLite."""
    def __init__(self, db_path: str = "data/trading_stats.sqlite"):
        """Initialize the stats tracker and connect to SQLite DB.

        Args:
            db_path: Filesystem path of the SQLite database file. The parent
                directory is created if it does not exist.
        """
        self.db_path = db_path
        self._ensure_data_directory()
        # 🆕 Run database migrations before connecting and creating tables
        # This ensures the schema is up-to-date when the connection is made
        # and tables are potentially created for the first time.
        logger.info("Running database migrations if needed...")
        run_db_migrations() # Uses DB_PATH defined in migrate_db.py, which should be the same
        logger.info("Database migration check complete.")
        # PARSE_DECLTYPES | PARSE_COLNAMES lets sqlite3 convert declared column
        # types back into Python objects where applicable.
        self.conn = sqlite3.connect(self.db_path, detect_types=sqlite3.PARSE_DECLTYPES | sqlite3.PARSE_COLNAMES)
        # All query helpers below rely on rows coming back as dicts.
        self.conn.row_factory = self._dict_factory
        self._create_tables() # CREATE IF NOT EXISTS will still be useful for first-time setup
        self._initialize_metadata() # Also potentially sets schema_version if DB was just created
  43. def _dict_factory(self, cursor, row):
  44. """Convert SQLite rows to dictionaries."""
  45. d = {}
  46. for idx, col in enumerate(cursor.description):
  47. d[col[0]] = row[idx]
  48. return d
  49. def _ensure_data_directory(self):
  50. """Ensure the data directory for the SQLite file exists."""
  51. data_dir = os.path.dirname(self.db_path)
  52. if data_dir and not os.path.exists(data_dir):
  53. os.makedirs(data_dir)
  54. logger.info(f"Created data directory for TradingStats DB: {data_dir}")
  55. def _execute_query(self, query: str, params: tuple = ()):
  56. """Execute a query (INSERT, UPDATE, DELETE)."""
  57. with self.conn:
  58. self.conn.execute(query, params)
  59. def _fetch_query(self, query: str, params: tuple = ()) -> List[Dict[str, Any]]:
  60. """Execute a SELECT query and fetch all results."""
  61. cur = self.conn.cursor()
  62. cur.execute(query, params)
  63. return cur.fetchall()
  64. def _fetchone_query(self, query: str, params: tuple = ()) -> Optional[Dict[str, Any]]:
  65. """Execute a SELECT query and fetch one result."""
  66. cur = self.conn.cursor()
  67. cur.execute(query, params)
  68. return cur.fetchone()
    def _create_tables(self):
        """Create all SQLite tables and indexes if they don't exist.

        Idempotent (CREATE ... IF NOT EXISTS throughout). Schema *changes* to
        existing databases are handled by the migration runner invoked in
        __init__, not here.
        """
        queries = [
            # Free-form key/value store for bot-level state
            # (e.g. 'start_date', 'initial_balance').
            """
            CREATE TABLE IF NOT EXISTS metadata (
                key TEXT PRIMARY KEY,
                value TEXT
            )
            """,
            # One row per fill; also carries position-lifecycle state merged in
            # from the former active_trades table (PHASE 4).
            """
            CREATE TABLE IF NOT EXISTS trades (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                exchange_fill_id TEXT UNIQUE,
                timestamp TEXT NOT NULL,
                symbol TEXT NOT NULL,
                side TEXT NOT NULL,
                amount REAL NOT NULL,
                price REAL NOT NULL,
                value REAL NOT NULL,
                trade_type TEXT NOT NULL,
                pnl REAL DEFAULT 0.0,
                linked_order_table_id INTEGER,
                -- 🆕 PHASE 4: Lifecycle tracking fields (merged from active_trades)
                status TEXT DEFAULT 'executed', -- 'pending', 'executed', 'position_opened', 'position_closed', 'cancelled'
                trade_lifecycle_id TEXT, -- Groups related trades into one lifecycle
                position_side TEXT, -- 'long', 'short', 'flat' - the resulting position side
                -- Position tracking
                entry_price REAL,
                current_position_size REAL DEFAULT 0,
                -- Order IDs (exchange IDs)
                entry_order_id TEXT,
                stop_loss_order_id TEXT,
                take_profit_order_id TEXT,
                -- Risk management
                stop_loss_price REAL,
                take_profit_price REAL,
                -- P&L tracking
                realized_pnl REAL DEFAULT 0,
                unrealized_pnl REAL DEFAULT 0,
                mark_price REAL DEFAULT 0,
                position_value REAL DEFAULT NULL,
                -- Risk Info from Exchange
                liquidation_price REAL DEFAULT NULL,
                margin_used REAL DEFAULT NULL,
                leverage REAL DEFAULT NULL,
                -- Timestamps
                position_opened_at TEXT,
                position_closed_at TEXT,
                updated_at TEXT DEFAULT CURRENT_TIMESTAMP,
                -- Notes
                notes TEXT
            )
            """,
            # One balance snapshot per UTC date (date is the primary key).
            """
            CREATE TABLE IF NOT EXISTS daily_balances (
                date TEXT PRIMARY KEY,
                balance REAL NOT NULL,
                timestamp TEXT NOT NULL
            )
            """,
            # Manual deposits/withdrawals so PNL math can exclude transfers.
            """
            CREATE TABLE IF NOT EXISTS balance_adjustments (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                adjustment_id TEXT UNIQUE,
                timestamp TEXT NOT NULL,
                type TEXT NOT NULL, -- 'deposit' or 'withdrawal'
                amount REAL NOT NULL, -- Always positive, type indicates direction
                description TEXT
            )
            """,
            # Orders placed by the bot, keyed by both the internal reference id
            # and the exchange-assigned order id.
            """
            CREATE TABLE IF NOT EXISTS orders (
                id INTEGER PRIMARY KEY AUTOINCREMENT,
                bot_order_ref_id TEXT UNIQUE,
                exchange_order_id TEXT UNIQUE,
                symbol TEXT NOT NULL,
                side TEXT NOT NULL,
                type TEXT NOT NULL,
                amount_requested REAL NOT NULL,
                amount_filled REAL DEFAULT 0.0,
                price REAL, -- For limit, stop, etc.
                status TEXT NOT NULL, -- e.g., 'open', 'partially_filled', 'filled', 'cancelled', 'rejected', 'expired', 'pending_trigger'
                timestamp_created TEXT NOT NULL,
                timestamp_updated TEXT NOT NULL,
                parent_bot_order_ref_id TEXT NULLABLE -- To link conditional orders (like SL triggers) to their parent order
            )
            """,
            # Secondary indexes for the lookup paths used by the query helpers.
            """
            CREATE INDEX IF NOT EXISTS idx_orders_bot_order_ref_id ON orders (bot_order_ref_id);
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_orders_exchange_order_id ON orders (exchange_order_id);
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_trades_exchange_fill_id ON trades (exchange_fill_id);
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_trades_linked_order_table_id ON trades (linked_order_table_id);
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_orders_parent_bot_order_ref_id ON orders (parent_bot_order_ref_id);
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_orders_status_type ON orders (status, type);
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_trades_status ON trades (status);
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_trades_lifecycle_id ON trades (trade_lifecycle_id);
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_trades_position_side ON trades (position_side);
            """,
            """
            CREATE INDEX IF NOT EXISTS idx_trades_symbol_status ON trades (symbol, status);
            """
        ]
        for query in queries:
            self._execute_query(query)
        logger.info("SQLite tables ensured for TradingStats.")
  190. def _initialize_metadata(self):
  191. """Initialize metadata if not already present."""
  192. start_date = self._get_metadata('start_date')
  193. initial_balance = self._get_metadata('initial_balance')
  194. if start_date is None:
  195. self._set_metadata('start_date', datetime.now(timezone.utc).isoformat())
  196. logger.info("Initialized 'start_date' in metadata.")
  197. if initial_balance is None:
  198. self._set_metadata('initial_balance', '0.0')
  199. logger.info("Initialized 'initial_balance' in metadata.")
  200. logger.info(f"TradingStats initialized. Start Date: {self._get_metadata('start_date')}, Initial Balance: {self._get_metadata('initial_balance')}")
  201. def _get_metadata(self, key: str) -> Optional[str]:
  202. """Retrieve a value from the metadata table."""
  203. row = self._fetchone_query("SELECT value FROM metadata WHERE key = ?", (key,))
  204. return row['value'] if row else None
  205. def _set_metadata(self, key: str, value: str):
  206. """Set a value in the metadata table."""
  207. self._execute_query("INSERT OR REPLACE INTO metadata (key, value) VALUES (?, ?)", (key, value))
    def set_initial_balance(self, balance: float):
        """Set the initial balance, but only when it is currently unset or zero.

        Also stamps 'start_date' when the balance is first established; an
        already-set non-zero balance is never overwritten.
        """
        current_initial_balance_str = self._get_metadata('initial_balance')
        current_initial_balance = float(current_initial_balance_str) if current_initial_balance_str else 0.0
        if current_initial_balance == 0.0: # Only set if it's effectively unset
            self._set_metadata('initial_balance', str(balance))
            # Also set start_date if it's the first time setting balance
            # NOTE(review): the second disjunct below is always True inside this
            # branch (current_initial_balance == 0.0 here), so start_date is
            # refreshed every time we pass through — confirm that reset is intended.
            if self._get_metadata('start_date') is None or float(current_initial_balance_str if current_initial_balance_str else '0.0') == 0.0:
                self._set_metadata('start_date', datetime.now(timezone.utc).isoformat())
            logger.info(f"Initial balance set to: ${balance:.2f}")
        else:
            logger.info(f"Initial balance already set to ${current_initial_balance:.2f}. Not changing.")
  220. def record_balance(self, balance: float):
  221. """Record daily balance snapshot."""
  222. today_iso = datetime.now(timezone.utc).date().isoformat()
  223. now_iso = datetime.now(timezone.utc).isoformat()
  224. existing_entry = self._fetchone_query("SELECT date FROM daily_balances WHERE date = ?", (today_iso,))
  225. if existing_entry:
  226. self._execute_query("UPDATE daily_balances SET balance = ?, timestamp = ? WHERE date = ?",
  227. (balance, now_iso, today_iso))
  228. else:
  229. self._execute_query("INSERT INTO daily_balances (date, balance, timestamp) VALUES (?, ?, ?)",
  230. (today_iso, balance, now_iso))
  231. # logger.debug(f"Recorded balance for {today_iso}: ${balance:.2f}") # Potentially too verbose
  232. def record_trade(self, symbol: str, side: str, amount: float, price: float,
  233. exchange_fill_id: Optional[str] = None, trade_type: str = "manual",
  234. pnl: Optional[float] = None, timestamp: Optional[str] = None,
  235. linked_order_table_id_to_link: Optional[int] = None):
  236. """Record a trade in the database."""
  237. if timestamp is None:
  238. timestamp = datetime.now(timezone.utc).isoformat()
  239. value = amount * price
  240. self._execute_query(
  241. "INSERT OR IGNORE INTO trades (symbol, side, amount, price, value, trade_type, timestamp, exchange_fill_id, pnl, linked_order_table_id) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
  242. (symbol, side, amount, price, value, trade_type, timestamp, exchange_fill_id, pnl or 0.0, linked_order_table_id_to_link)
  243. )
  244. logger.info(f"📈 Trade recorded: {side.upper()} {amount:.6f} {symbol} @ ${price:.2f} (${value:.2f}) [{trade_type}]")
  245. def get_all_trades(self) -> List[Dict[str, Any]]:
  246. """Fetch all trades from the database, ordered by timestamp."""
  247. return self._fetch_query("SELECT * FROM trades ORDER BY timestamp ASC")
    def calculate_completed_trade_cycles(self) -> List[Dict[str, Any]]:
        """
        Calculate completed trade cycles (full position open to close) using FIFO method from DB trades.
        Handles both long and short cycles. PNL is summed from individual trade records.

        A "cycle" runs from the first fill that opens a position until the fill
        that brings the open quantity back to (near) zero. A closing fill larger
        than the open quantity flips the position and starts a new cycle in the
        opposite direction. Amounts are matched with a 1e-9 tolerance to absorb
        float rounding.

        Returns:
            List of cycle dicts with symbol/token, cycle_start/cycle_end,
            cycle_type ('long'/'short'), order counts, total amount, average
            entry/exit prices, total_pnl, entry/exit values, and the raw
            trade rows that formed the cycle.
        """
        completed_cycles = []
        # Per-symbol working state:
        # symbol -> {
        #     'open_legs': [{'side', 'amount_remaining', 'price', 'timestamp', 'value', 'pnl_contribution'}],  # FIFO queue of fills in the current open leg
        #     'cycle_trades_details': [trade_dict_from_db],  # all trades part of the current forming cycle
        #     'cycle_start_ts': timestamp_str,
        #     'current_leg_type': 'long' | 'short' | None
        # }
        open_positions_data = defaultdict(lambda: {
            'open_legs': [],
            'cycle_trades_details': [],
            'cycle_start_ts': None,
            'current_leg_type': None
        })
        all_trades = self.get_all_trades() # Trades now include their 'pnl' contribution
        for trade in all_trades:
            symbol = trade['symbol']
            side = trade['side'].lower() # Ensure lowercase for the comparisons below
            amount = trade['amount']
            price = trade['price']
            timestamp = trade['timestamp']
            trade_pnl = trade.get('pnl', 0.0) # PNL from this specific fill
            pos_data = open_positions_data[symbol]
            current_trade_detail = {**trade} # Copy trade details
            if pos_data['current_leg_type'] is None: # Starting a new potential cycle
                pos_data['current_leg_type'] = 'long' if side == 'buy' else 'short'
                pos_data['cycle_start_ts'] = timestamp
                pos_data['open_legs'].append({
                    'side': side, 'amount_remaining': amount, 'price': price,
                    'timestamp': timestamp, 'value': amount * price,
                    'pnl_contribution': trade_pnl # PNL of opening trade usually 0
                })
                pos_data['cycle_trades_details'] = [current_trade_detail]
            elif (side == 'buy' and pos_data['current_leg_type'] == 'long') or \
                 (side == 'sell' and pos_data['current_leg_type'] == 'short'):
                # Increasing an existing long or short position
                pos_data['open_legs'].append({
                    'side': side, 'amount_remaining': amount, 'price': price,
                    'timestamp': timestamp, 'value': amount * price,
                    'pnl_contribution': trade_pnl
                })
                pos_data['cycle_trades_details'].append(current_trade_detail)
            elif (side == 'sell' and pos_data['current_leg_type'] == 'long'): # Selling to reduce/close long
                pos_data['cycle_trades_details'].append(current_trade_detail)
                sell_amount_remaining = amount
                # Consume open buy fills oldest-first until this sell is absorbed
                while sell_amount_remaining > 0 and pos_data['open_legs']:
                    oldest_leg_fill = pos_data['open_legs'][0] # FIFO
                    match_amount = min(sell_amount_remaining, oldest_leg_fill['amount_remaining'])
                    oldest_leg_fill['amount_remaining'] -= match_amount
                    sell_amount_remaining -= match_amount
                    if oldest_leg_fill['amount_remaining'] <= 1e-9:
                        pos_data['open_legs'].pop(0)
                if not pos_data['open_legs']: # Long cycle closed
                    # Compile cycle
                    cycle_pnl = sum(t.get('pnl', 0.0) for t in pos_data['cycle_trades_details'])
                    cycle_buys = [t for t in pos_data['cycle_trades_details'] if t['side'] == 'buy']
                    cycle_sells = [t for t in pos_data['cycle_trades_details'] if t['side'] == 'sell']
                    total_amount_bought = sum(t['amount'] for t in cycle_buys)
                    total_buy_value = sum(t['value'] for t in cycle_buys)
                    total_amount_sold = sum(t['amount'] for t in cycle_sells) # Should match total_amount_bought
                    total_sell_value = sum(t['value'] for t in cycle_sells)
                    completed_cycle = {
                        'symbol': symbol,
                        'token': symbol.split('/')[0] if '/' in symbol else symbol,
                        'cycle_start': pos_data['cycle_start_ts'],
                        'cycle_end': timestamp, # End time is the timestamp of the closing trade
                        'cycle_type': 'long',
                        'buy_orders': len(cycle_buys),
                        'sell_orders': len(cycle_sells),
                        'total_orders': len(pos_data['cycle_trades_details']),
                        'total_amount': total_amount_bought,
                        'avg_entry_price': total_buy_value / total_amount_bought if total_amount_bought > 0 else 0,
                        'avg_exit_price': total_sell_value / total_amount_sold if total_amount_sold > 0 else 0,
                        'total_pnl': cycle_pnl,
                        'buy_value': total_buy_value,
                        'sell_value': total_sell_value,
                        'cycle_trades': pos_data['cycle_trades_details'].copy()
                    }
                    completed_cycles.append(completed_cycle)
                    # Reset for next cycle, potentially flip if sell_amount_remaining > 0
                    pos_data['cycle_trades_details'] = []
                    pos_data['cycle_start_ts'] = None
                    pos_data['current_leg_type'] = None
                    if sell_amount_remaining > 1e-9: # Flipped to short
                        pos_data['current_leg_type'] = 'short'
                        pos_data['cycle_start_ts'] = timestamp
                        pos_data['open_legs'].append({
                            'side': 'sell', 'amount_remaining': sell_amount_remaining, 'price': price,
                            'timestamp': timestamp, 'value': sell_amount_remaining * price,
                            'pnl_contribution': trade_pnl # PNL of this fill if it was part of closing previous and opening this
                        })
                        pos_data['cycle_trades_details'] = [current_trade_detail] # Start new details list with current trade
            elif (side == 'buy' and pos_data['current_leg_type'] == 'short'): # Buying to reduce/close short
                pos_data['cycle_trades_details'].append(current_trade_detail)
                buy_amount_remaining = amount
                # Consume open sell fills oldest-first until this buy is absorbed
                while buy_amount_remaining > 0 and pos_data['open_legs']:
                    oldest_leg_fill = pos_data['open_legs'][0] # FIFO
                    match_amount = min(buy_amount_remaining, oldest_leg_fill['amount_remaining'])
                    oldest_leg_fill['amount_remaining'] -= match_amount
                    buy_amount_remaining -= match_amount
                    if oldest_leg_fill['amount_remaining'] <= 1e-9:
                        pos_data['open_legs'].pop(0)
                if not pos_data['open_legs']: # Short cycle closed
                    # Compile cycle
                    cycle_pnl = sum(t.get('pnl', 0.0) for t in pos_data['cycle_trades_details'])
                    cycle_sells = [t for t in pos_data['cycle_trades_details'] if t['side'] == 'sell'] # Entry for short
                    cycle_buys = [t for t in pos_data['cycle_trades_details'] if t['side'] == 'buy'] # Exit for short
                    total_amount_sold = sum(t['amount'] for t in cycle_sells)
                    total_sell_value = sum(t['value'] for t in cycle_sells)
                    total_amount_bought = sum(t['amount'] for t in cycle_buys) # Should match total_amount_sold
                    total_buy_value = sum(t['value'] for t in cycle_buys)
                    completed_cycle = {
                        'symbol': symbol,
                        'token': symbol.split('/')[0] if '/' in symbol else symbol,
                        'cycle_start': pos_data['cycle_start_ts'],
                        'cycle_end': timestamp,
                        'cycle_type': 'short',
                        'sell_orders': len(cycle_sells), # Entry orders for short
                        'buy_orders': len(cycle_buys), # Exit orders for short
                        'total_orders': len(pos_data['cycle_trades_details']),
                        'total_amount': total_amount_sold, # Amount that formed the basis of the short
                        'avg_entry_price': total_sell_value / total_amount_sold if total_amount_sold > 0 else 0, # Avg sell price
                        'avg_exit_price': total_buy_value / total_amount_bought if total_amount_bought > 0 else 0, # Avg buy price
                        'total_pnl': cycle_pnl,
                        'sell_value': total_sell_value, # Entry value for short
                        'buy_value': total_buy_value, # Exit value for short
                        'cycle_trades': pos_data['cycle_trades_details'].copy()
                    }
                    completed_cycles.append(completed_cycle)
                    # Reset for next cycle, potentially flip if buy_amount_remaining > 0
                    pos_data['cycle_trades_details'] = []
                    pos_data['cycle_start_ts'] = None
                    pos_data['current_leg_type'] = None
                    if buy_amount_remaining > 1e-9: # Flipped to long
                        pos_data['current_leg_type'] = 'long'
                        pos_data['cycle_start_ts'] = timestamp
                        pos_data['open_legs'].append({
                            'side': 'buy', 'amount_remaining': buy_amount_remaining, 'price': price,
                            'timestamp': timestamp, 'value': buy_amount_remaining * price,
                            'pnl_contribution': trade_pnl
                        })
                        pos_data['cycle_trades_details'] = [current_trade_detail]
        return completed_cycles
  395. def get_basic_stats(self, current_balance: Optional[float] = None) -> Dict[str, Any]:
  396. """Get basic trading statistics from DB."""
  397. trades = self._fetch_query("SELECT COUNT(*) as count, side FROM trades GROUP BY side")
  398. total_trades_count = sum(t['count'] for t in trades)
  399. buy_trades_count = next((t['count'] for t in trades if t['side'] == 'buy'), 0)
  400. sell_trades_count = next((t['count'] for t in trades if t['side'] == 'sell'), 0)
  401. initial_balance_str = self._get_metadata('initial_balance')
  402. initial_balance = float(initial_balance_str) if initial_balance_str else 0.0
  403. start_date_iso = self._get_metadata('start_date')
  404. start_date_obj = datetime.fromisoformat(start_date_iso) if start_date_iso else datetime.now(timezone.utc)
  405. days_active = (datetime.now(timezone.utc) - start_date_obj).days + 1
  406. completed_cycles = self.calculate_completed_trade_cycles() # This can be expensive
  407. total_pnl_from_cycles = sum(cycle['total_pnl'] for cycle in completed_cycles)
  408. last_trade_row = self._fetchone_query("SELECT timestamp FROM trades ORDER BY timestamp DESC LIMIT 1")
  409. last_trade_ts = last_trade_row['timestamp'] if last_trade_row else None
  410. return {
  411. 'total_trades': total_trades_count,
  412. 'completed_trades': len(completed_cycles),
  413. 'buy_trades': buy_trades_count,
  414. 'sell_trades': sell_trades_count,
  415. 'initial_balance': initial_balance,
  416. 'total_pnl': total_pnl_from_cycles, # PNL from closed cycles
  417. 'days_active': days_active,
  418. 'start_date': start_date_obj.strftime('%Y-%m-%d'),
  419. 'last_trade': last_trade_ts
  420. }
  421. def get_performance_stats(self) -> Dict[str, Any]:
  422. """Calculate advanced performance statistics using completed cycles."""
  423. completed_cycles = self.calculate_completed_trade_cycles()
  424. if not completed_cycles:
  425. return {
  426. 'win_rate': 0.0, 'profit_factor': 0.0, 'avg_win': 0.0, 'avg_loss': 0.0,
  427. 'largest_win': 0.0, 'largest_loss': 0.0, 'consecutive_wins': 0,
  428. 'consecutive_losses': 0, 'total_wins': 0, 'total_losses': 0, 'expectancy': 0.0
  429. }
  430. wins_pnl = [c['total_pnl'] for c in completed_cycles if c['total_pnl'] > 0]
  431. losses_pnl = [abs(c['total_pnl']) for c in completed_cycles if c['total_pnl'] < 0] # Absolute values for losses
  432. total_wins_count = len(wins_pnl)
  433. total_losses_count = len(losses_pnl)
  434. total_completed_count = total_wins_count + total_losses_count
  435. win_rate = (total_wins_count / total_completed_count * 100) if total_completed_count > 0 else 0.0
  436. sum_of_wins = sum(wins_pnl)
  437. sum_of_losses = sum(losses_pnl) # Sum of absolute losses
  438. profit_factor = (sum_of_wins / sum_of_losses) if sum_of_losses > 0 else float('inf') if sum_of_wins > 0 else 0.0
  439. avg_win = np.mean(wins_pnl) if wins_pnl else 0.0
  440. avg_loss = np.mean(losses_pnl) if losses_pnl else 0.0 # Avg of absolute losses
  441. largest_win = max(wins_pnl) if wins_pnl else 0.0
  442. largest_loss = max(losses_pnl) if losses_pnl else 0.0 # Largest absolute loss
  443. # Consecutive wins/losses
  444. consecutive_wins = 0
  445. consecutive_losses = 0
  446. current_wins = 0
  447. current_losses = 0
  448. for cycle in completed_cycles:
  449. if cycle['total_pnl'] > 0:
  450. current_wins += 1
  451. current_losses = 0
  452. else: # Assumes PNL is non-zero for a loss, or it's a scratch trade
  453. current_losses += 1
  454. current_wins = 0
  455. consecutive_wins = max(consecutive_wins, current_wins)
  456. consecutive_losses = max(consecutive_losses, current_losses)
  457. expectancy = (avg_win * (win_rate / 100)) - (avg_loss * (1 - (win_rate / 100)))
  458. return {
  459. 'win_rate': win_rate, 'profit_factor': profit_factor, 'avg_win': avg_win, 'avg_loss': avg_loss,
  460. 'largest_win': largest_win, 'largest_loss': largest_loss,
  461. 'consecutive_wins': consecutive_wins, 'consecutive_losses': consecutive_losses,
  462. 'total_wins': total_wins_count, 'total_losses': total_losses_count, 'expectancy': expectancy
  463. }
  464. def get_risk_metrics(self) -> Dict[str, Any]:
  465. """Calculate risk-adjusted metrics from daily balances."""
  466. daily_balances_data = self._fetch_query("SELECT balance FROM daily_balances ORDER BY date ASC")
  467. if not daily_balances_data or len(daily_balances_data) < 2:
  468. return {'sharpe_ratio': 0.0, 'sortino_ratio': 0.0, 'max_drawdown': 0.0, 'volatility': 0.0, 'var_95': 0.0}
  469. balances = [entry['balance'] for entry in daily_balances_data]
  470. returns = np.diff(balances) / balances[:-1] # Calculate daily returns
  471. returns = returns[np.isfinite(returns)] # Remove NaNs or Infs if any balance was 0
  472. if returns.size == 0:
  473. return {'sharpe_ratio': 0.0, 'sortino_ratio': 0.0, 'max_drawdown': 0.0, 'volatility': 0.0, 'var_95': 0.0}
  474. risk_free_rate_daily = (1 + 0.02)**(1/365) - 1 # Approx 2% annual risk-free rate, daily
  475. excess_returns = returns - risk_free_rate_daily
  476. sharpe_ratio = np.mean(excess_returns) / np.std(returns) * np.sqrt(365) if np.std(returns) > 0 else 0.0
  477. downside_returns = returns[returns < 0]
  478. downside_std = np.std(downside_returns) if len(downside_returns) > 0 else 0.0
  479. sortino_ratio = np.mean(excess_returns) / downside_std * np.sqrt(365) if downside_std > 0 else 0.0
  480. cumulative_returns = np.cumprod(1 + returns)
  481. peak = np.maximum.accumulate(cumulative_returns)
  482. drawdown = (cumulative_returns - peak) / peak
  483. max_drawdown_pct = abs(np.min(drawdown) * 100) if drawdown.size > 0 else 0.0
  484. volatility_pct = np.std(returns) * np.sqrt(365) * 100
  485. var_95_pct = abs(np.percentile(returns, 5) * 100) if returns.size > 0 else 0.0
  486. return {
  487. 'sharpe_ratio': sharpe_ratio, 'sortino_ratio': sortino_ratio,
  488. 'max_drawdown': max_drawdown_pct, 'volatility': volatility_pct, 'var_95': var_95_pct
  489. }
  490. def get_comprehensive_stats(self, current_balance: Optional[float] = None) -> Dict[str, Any]:
  491. """Get all statistics combined."""
  492. if current_balance is not None: # Ensure it's not just None, but explicitly provided
  493. self.record_balance(current_balance) # Record current balance for today
  494. basic = self.get_basic_stats(current_balance) # Pass current_balance for P&L context if needed
  495. performance = self.get_performance_stats()
  496. risk = self.get_risk_metrics()
  497. initial_balance = basic['initial_balance']
  498. total_return_pct = 0.0
  499. # Use current_balance if available and valid for total return calculation
  500. # Otherwise, PNL from basic_stats (closed trades) is the primary PNL source
  501. # This needs careful thought: current_balance reflects unrealized PNL too.
  502. # The original code used current_balance - initial_balance for total_pnl if current_balance provided.
  503. effective_balance_for_return = current_balance if current_balance is not None else (initial_balance + basic['total_pnl'])
  504. if initial_balance > 0:
  505. total_return_pct = ((effective_balance_for_return - initial_balance) / initial_balance) * 100
  506. return {
  507. 'basic': basic,
  508. 'performance': performance,
  509. 'risk': risk,
  510. 'current_balance': current_balance if current_balance is not None else initial_balance + basic['total_pnl'], # Best estimate
  511. 'total_return': total_return_pct, # Percentage
  512. 'last_updated': datetime.now(timezone.utc).isoformat()
  513. }
  514. def _get_open_positions_count_from_db(self) -> int:
  515. """🧹 PHASE 4: Get count of open positions from enhanced trades table."""
  516. row = self._fetchone_query("SELECT COUNT(DISTINCT symbol) as count FROM trades WHERE status = 'position_opened'")
  517. return row['count'] if row else 0
def format_stats_message(self, current_balance: Optional[float] = None) -> str:
    """Format stats for Telegram display using data from DB.

    Builds an HTML-formatted, multi-section summary (account overview,
    activity, performance, optional balance adjustments, distribution,
    session info). Never raises: any failure is logged and a short
    error message with truncated debug info is returned instead.
    """
    try:
        stats = self.get_comprehensive_stats(current_balance)
        basic = stats['basic']
        perf = stats['performance']
        # risk = stats['risk'] # Risk metrics not directly used in this message format previously
        # Use current_balance passed or derived in get_comprehensive_stats
        effective_current_balance = stats['current_balance']
        initial_bal = basic['initial_balance']
        # Total P&L should reflect current worth vs initial, including open positions if current_balance is live
        total_pnl_val = effective_current_balance - initial_bal if initial_bal > 0 else basic['total_pnl'] # Fallback to closed PNL
        total_return_pct = (total_pnl_val / initial_bal * 100) if initial_bal > 0 else 0.0
        pnl_emoji = "🟢" if total_pnl_val >= 0 else "🔴"
        open_positions_count = self._get_open_positions_count_from_db()
        # Calculate trade volume and average trade size from 'trades' table for sell orders
        sell_trades_data = self._fetch_query("SELECT value FROM trades WHERE side = 'sell'")
        total_sell_volume = sum(t['value'] for t in sell_trades_data)
        avg_trade_size_sell = (total_sell_volume / len(sell_trades_data)) if sell_trades_data else 0.0
        adjustments_summary = self.get_balance_adjustments_summary()
        stats_text = f"""📊 <b>Trading Statistics</b>
💰 <b>Account Overview:</b>
• Current Balance: ${effective_current_balance:,.2f}
• Initial Balance: ${initial_bal:,.2f}
• {pnl_emoji} Total P&L: ${total_pnl_val:,.2f} ({total_return_pct:+.2f}%)
📈 <b>Trading Activity:</b>
• Total Orders: {basic['total_trades']}
• Completed Trades (Cycles): {basic['completed_trades']}
• Open Positions: {open_positions_count}
• Days Active: {basic['days_active']}
🏆 <b>Performance Metrics:</b>
• Win Rate: {perf['win_rate']:.1f}% ({perf['total_wins']}W/{perf['total_losses']}L)
• Profit Factor: {perf['profit_factor']:.2f}
• Avg Win: ${perf['avg_win']:.2f} | Avg Loss: ${perf['avg_loss']:.2f}
• Largest Win: ${perf['largest_win']:.2f} | Largest Loss: ${perf['largest_loss']:.2f}
"""
        # Adjustments section is only shown when deposits/withdrawals exist.
        if adjustments_summary['adjustment_count'] > 0:
            adj_emoji = "💰" if adjustments_summary['net_adjustment'] >= 0 else "💸"
            stats_text += f"""
💰 <b>Balance Adjustments:</b>
• Deposits: ${adjustments_summary['total_deposits']:,.2f}
• Withdrawals: ${adjustments_summary['total_withdrawals']:,.2f}
• {adj_emoji} Net: ${adjustments_summary['net_adjustment']:,.2f} ({adjustments_summary['adjustment_count']} transactions)
"""
        stats_text += f"""
🎯 <b>Trade Distribution:</b>
• Buy Orders: {basic['buy_trades']} | Sell Orders: {basic['sell_trades']}
• Volume Traded (Sells): ${total_sell_volume:,.2f}
• Avg Sell Trade Size: ${avg_trade_size_sell:.2f}
⏰ <b>Session Info:</b>
• Started: {basic['start_date']}
• Last Update: {datetime.now(timezone.utc).strftime('%Y-%m-%d %H:%M:%S UTC')}
""" # Changed Last Update format
        return stats_text.strip()
    except Exception as e:
        logger.error(f"Error formatting stats message: {e}", exc_info=True)
        return f"📊 <b>Trading Statistics</b>\n\n❌ <b>Error loading statistics</b>\n\n🔧 <b>Debug info:</b> {str(e)[:100]}"
  575. def get_recent_trades(self, limit: int = 10) -> List[Dict[str, Any]]:
  576. """Get recent trades from DB."""
  577. return self._fetch_query("SELECT * FROM trades ORDER BY timestamp DESC LIMIT ?", (limit,))
  578. def get_token_performance(self) -> Dict[str, Dict[str, Any]]:
  579. """Get performance statistics grouped by token using completed cycles."""
  580. completed_cycles = self.calculate_completed_trade_cycles()
  581. token_performance = {}
  582. # Group cycles by token (symbol's base part)
  583. token_cycles_map = defaultdict(list)
  584. for cycle in completed_cycles:
  585. token_cycles_map[cycle['token']].append(cycle)
  586. for token, cycles_for_token in token_cycles_map.items():
  587. if not cycles_for_token: continue
  588. wins_pnl = [c['total_pnl'] for c in cycles_for_token if c['total_pnl'] > 0]
  589. losses_pnl = [abs(c['total_pnl']) for c in cycles_for_token if c['total_pnl'] < 0]
  590. total_pnl = sum(c['total_pnl'] for c in cycles_for_token)
  591. total_volume_sold = sum(c['sell_value'] for c in cycles_for_token) # Based on sell value in cycle
  592. pnl_percentage = (total_pnl / total_volume_sold * 100) if total_volume_sold > 0 else 0.0
  593. total_wins_count = len(wins_pnl)
  594. total_losses_count = len(losses_pnl)
  595. total_completed_count = total_wins_count + total_losses_count
  596. win_rate = (total_wins_count / total_completed_count * 100) if total_completed_count > 0 else 0.0
  597. sum_of_wins = sum(wins_pnl)
  598. sum_of_losses = sum(losses_pnl)
  599. profit_factor = (sum_of_wins / sum_of_losses) if sum_of_losses > 0 else float('inf') if sum_of_wins > 0 else 0.0
  600. avg_win = np.mean(wins_pnl) if wins_pnl else 0.0
  601. avg_loss = np.mean(losses_pnl) if losses_pnl else 0.0
  602. expectancy = (avg_win * (win_rate / 100)) - (avg_loss * (1 - (win_rate / 100)))
  603. largest_win = max(wins_pnl) if wins_pnl else 0.0
  604. largest_loss = max(losses_pnl) if losses_pnl else 0.0
  605. token_performance[token] = {
  606. 'total_pnl': total_pnl, 'pnl_percentage': pnl_percentage,
  607. 'completed_trades': total_completed_count, 'total_volume': total_volume_sold,
  608. 'win_rate': win_rate, 'total_wins': total_wins_count, 'total_losses': total_losses_count,
  609. 'profit_factor': profit_factor, 'expectancy': expectancy,
  610. 'largest_win': largest_win, 'largest_loss': largest_loss,
  611. 'avg_win': avg_win, 'avg_loss': avg_loss
  612. # 'cycles': cycles_for_token # Optionally include raw cycle data
  613. }
  614. return token_performance
def get_token_detailed_stats(self, token: str) -> Dict[str, Any]:
    """Get detailed statistics for a specific token using DB queries and cycle calculation.

    Returns one of three dict shapes:
      1. a 'no history' stub when no orders match the token prefix,
      2. an 'open positions only' stub when orders exist but no cycle completed,
      3. a full stats dict combining raw order counts with cycle performance.
    """
    upper_token = _normalize_token_case(token)
    # Get all trades for this specific token (symbol starts with token + '/')
    # This is simpler than trying to filter cycles by token string directly in SQL for complex symbols
    all_trades_for_token_symbol_prefix = self._fetch_query(
        "SELECT * FROM trades WHERE symbol LIKE ? ORDER BY timestamp ASC", (f"{upper_token}/%",)
    )
    if not all_trades_for_token_symbol_prefix:
        return {
            'token': upper_token, 'total_trades': 0, 'total_pnl': 0.0, 'win_rate': 0.0,
            'message': f"No trading history found for {upper_token}"
        }
    # Calculate completed cycles specifically for these trades
    # To correctly calculate cycles for *only* this token, we need to run the FIFO logic
    # on trades filtered for this token.
    # The global `calculate_completed_trade_cycles` uses *all* trades,
    # so we filter its output down to this token afterwards.
    all_completed_cycles = self.calculate_completed_trade_cycles()
    token_cycles = [c for c in all_completed_cycles if _normalize_token_case(c['token']) == upper_token]
    # Raw order-level tallies (independent of cycle completion).
    total_individual_orders = len(all_trades_for_token_symbol_prefix)
    buy_orders = len([t for t in all_trades_for_token_symbol_prefix if t['side'] == 'buy'])
    sell_orders = len([t for t in all_trades_for_token_symbol_prefix if t['side'] == 'sell'])
    total_volume_all_orders = sum(t['value'] for t in all_trades_for_token_symbol_prefix)
    if not token_cycles:
        return {
            'token': upper_token, 'total_trades': total_individual_orders, 'buy_trades': buy_orders,
            'sell_trades': sell_orders, 'total_volume': total_volume_all_orders,
            'completed_trades': 0, 'total_pnl': 0.0, 'pnl_percentage': 0.0, 'win_rate': 0.0,
            'message': f"{upper_token} has open positions or trades but no completed trade cycles yet."
        }
    # Performance based on this token's completed cycles
    perf_stats = self.get_token_performance().get(upper_token, {}) # Re-use general calculation logic
    # Filter for recent closed trades
    recent_closed_trades = [t for t in all_trades_for_token_symbol_prefix if t.get('status') == 'position_closed']
    return {
        'token': upper_token,
        'total_trades': total_individual_orders,
        'completed_trades': perf_stats.get('completed_trades', 0),
        'buy_trades': buy_orders,
        'sell_trades': sell_orders,
        'total_volume': total_volume_all_orders, # Volume of all orders for this token
        'completed_volume': perf_stats.get('total_volume', 0.0), # Volume from completed cycles
        'total_pnl': perf_stats.get('total_pnl', 0.0),
        'pnl_percentage': perf_stats.get('pnl_percentage', 0.0),
        'win_rate': perf_stats.get('win_rate', 0.0),
        'profit_factor': perf_stats.get('profit_factor', 0.0),
        'avg_win': perf_stats.get('avg_win', 0.0),
        'avg_loss': perf_stats.get('avg_loss', 0.0),
        'largest_win': perf_stats.get('largest_win', 0.0),
        'largest_loss': perf_stats.get('largest_loss', 0.0),
        'expectancy': perf_stats.get('expectancy', 0.0),
        'total_wins': perf_stats.get('total_wins', 0),
        'total_losses': perf_stats.get('total_losses', 0),
        'recent_trades': recent_closed_trades[-5:], # Last 5 closed trades for this token
        'cycles': token_cycles # Optionally include raw cycle data
    }
  671. def _get_aggregated_period_stats_from_cycles(self) -> Dict[str, Dict[str, Dict[str, Any]]]:
  672. """Helper to aggregate completed cycles by day, week, month for P&L and volume."""
  673. completed_cycles = self.calculate_completed_trade_cycles()
  674. daily_aggr = defaultdict(lambda: {'trades': 0, 'pnl': 0.0, 'volume': 0.0})
  675. weekly_aggr = defaultdict(lambda: {'trades': 0, 'pnl': 0.0, 'volume': 0.0})
  676. monthly_aggr = defaultdict(lambda: {'trades': 0, 'pnl': 0.0, 'volume': 0.0})
  677. for cycle in completed_cycles:
  678. try:
  679. # Use cycle_end timestamp (string) and parse it
  680. end_dt = datetime.fromisoformat(cycle['cycle_end'])
  681. if end_dt.tzinfo is None: # Ensure timezone aware for proper calculations
  682. end_dt = end_dt.replace(tzinfo=timezone.utc)
  683. else:
  684. end_dt = end_dt.astimezone(timezone.utc)
  685. pnl = cycle['total_pnl']
  686. volume = cycle['sell_value'] # Volume based on sell value of the cycle
  687. # Daily
  688. day_key = end_dt.strftime('%Y-%m-%d')
  689. daily_aggr[day_key]['trades'] += 1
  690. daily_aggr[day_key]['pnl'] += pnl
  691. daily_aggr[day_key]['volume'] += volume
  692. # Weekly (YYYY-Www, where ww is week number 00-53, Monday as first day)
  693. week_key = end_dt.strftime('%Y-W%W') # %W for Monday as first day
  694. weekly_aggr[week_key]['trades'] += 1
  695. weekly_aggr[week_key]['pnl'] += pnl
  696. weekly_aggr[week_key]['volume'] += volume
  697. # Monthly
  698. month_key = end_dt.strftime('%Y-%m')
  699. monthly_aggr[month_key]['trades'] += 1
  700. monthly_aggr[month_key]['pnl'] += pnl
  701. monthly_aggr[month_key]['volume'] += volume
  702. except Exception as e:
  703. logger.warning(f"Could not parse cycle_end '{cycle.get('cycle_end')}' for periodic stats: {e}")
  704. continue
  705. for aggr_dict in [daily_aggr, weekly_aggr, monthly_aggr]:
  706. for stats in aggr_dict.values():
  707. stats['pnl_pct'] = (stats['pnl'] / stats['volume'] * 100) if stats['volume'] > 0 else 0.0
  708. return {'daily': dict(daily_aggr), 'weekly': dict(weekly_aggr), 'monthly': dict(monthly_aggr)}
  709. def get_daily_stats(self, limit: int = 10) -> List[Dict[str, Any]]:
  710. """Get daily performance stats for the last N days."""
  711. period_aggregates = self._get_aggregated_period_stats_from_cycles()['daily']
  712. daily_stats_list = []
  713. today_utc = datetime.now(timezone.utc).date()
  714. for i in range(limit):
  715. target_date = today_utc - timedelta(days=i)
  716. date_str = target_date.strftime('%Y-%m-%d')
  717. date_formatted = target_date.strftime('%m/%d') # For display
  718. stats_for_day = period_aggregates.get(date_str)
  719. if stats_for_day:
  720. daily_stats_list.append({
  721. 'date': date_str, 'date_formatted': date_formatted, 'has_trades': True,
  722. **stats_for_day
  723. })
  724. else:
  725. daily_stats_list.append({
  726. 'date': date_str, 'date_formatted': date_formatted, 'has_trades': False,
  727. 'trades': 0, 'pnl': 0.0, 'volume': 0.0, 'pnl_pct': 0.0
  728. })
  729. return daily_stats_list
  730. def get_weekly_stats(self, limit: int = 10) -> List[Dict[str, Any]]:
  731. """Get weekly performance stats for the last N weeks."""
  732. period_aggregates = self._get_aggregated_period_stats_from_cycles()['weekly']
  733. weekly_stats_list = []
  734. today_utc = datetime.now(timezone.utc).date()
  735. for i in range(limit):
  736. # Target week starts on Monday 'i' weeks ago
  737. target_monday = today_utc - timedelta(days=today_utc.weekday() + (i * 7))
  738. target_sunday = target_monday + timedelta(days=6)
  739. week_key = target_monday.strftime('%Y-W%W') # %W for Monday as first day
  740. week_formatted = f"{target_monday.strftime('%m/%d')}-{target_sunday.strftime('%m/%d')}"
  741. stats_for_week = period_aggregates.get(week_key)
  742. if stats_for_week:
  743. weekly_stats_list.append({
  744. 'week': week_key, 'week_formatted': week_formatted, 'has_trades': True,
  745. **stats_for_week
  746. })
  747. else:
  748. weekly_stats_list.append({
  749. 'week': week_key, 'week_formatted': week_formatted, 'has_trades': False,
  750. 'trades': 0, 'pnl': 0.0, 'volume': 0.0, 'pnl_pct': 0.0
  751. })
  752. return weekly_stats_list
  753. def get_monthly_stats(self, limit: int = 10) -> List[Dict[str, Any]]:
  754. """Get monthly performance stats for the last N months."""
  755. period_aggregates = self._get_aggregated_period_stats_from_cycles()['monthly']
  756. monthly_stats_list = []
  757. current_month_start_utc = datetime.now(timezone.utc).date().replace(day=1)
  758. for i in range(limit):
  759. # Calculate target month by subtracting months
  760. year = current_month_start_utc.year
  761. month = current_month_start_utc.month - i
  762. while month <= 0: # Adjust year if month goes to 0 or negative
  763. month += 12
  764. year -= 1
  765. target_month_date = datetime(year, month, 1, tzinfo=timezone.utc).date()
  766. month_key = target_month_date.strftime('%Y-%m')
  767. month_formatted = target_month_date.strftime('%b %Y')
  768. stats_for_month = period_aggregates.get(month_key)
  769. if stats_for_month:
  770. monthly_stats_list.append({
  771. 'month': month_key, 'month_formatted': month_formatted, 'has_trades': True,
  772. **stats_for_month
  773. })
  774. else:
  775. monthly_stats_list.append({
  776. 'month': month_key, 'month_formatted': month_formatted, 'has_trades': False,
  777. 'trades': 0, 'pnl': 0.0, 'volume': 0.0, 'pnl_pct': 0.0
  778. })
  779. return monthly_stats_list
  780. def record_deposit(self, amount: float, timestamp: Optional[str] = None,
  781. deposit_id: Optional[str] = None, description: Optional[str] = None):
  782. """Record a deposit."""
  783. ts = timestamp if timestamp else datetime.now(timezone.utc).isoformat()
  784. desc = description if description else f'Deposit of ${amount:.2f}'
  785. self._execute_query(
  786. "INSERT INTO balance_adjustments (adjustment_id, timestamp, type, amount, description) VALUES (?, ?, ?, ?, ?)",
  787. (deposit_id, ts, 'deposit', amount, desc)
  788. )
  789. # Adjust initial_balance in metadata to reflect capital changes
  790. current_initial = float(self._get_metadata('initial_balance') or '0.0')
  791. self._set_metadata('initial_balance', str(current_initial + amount))
  792. logger.info(f"💰 Recorded deposit: ${amount:.2f}. New effective initial balance: ${current_initial + amount:.2f}")
  793. def record_withdrawal(self, amount: float, timestamp: Optional[str] = None,
  794. withdrawal_id: Optional[str] = None, description: Optional[str] = None):
  795. """Record a withdrawal."""
  796. ts = timestamp if timestamp else datetime.now(timezone.utc).isoformat()
  797. desc = description if description else f'Withdrawal of ${amount:.2f}'
  798. self._execute_query(
  799. "INSERT INTO balance_adjustments (adjustment_id, timestamp, type, amount, description) VALUES (?, ?, ?, ?, ?)",
  800. (withdrawal_id, ts, 'withdrawal', amount, desc) # Store positive amount, type indicates withdrawal
  801. )
  802. current_initial = float(self._get_metadata('initial_balance') or '0.0')
  803. self._set_metadata('initial_balance', str(current_initial - amount))
  804. logger.info(f"💸 Recorded withdrawal: ${amount:.2f}. New effective initial balance: ${current_initial - amount:.2f}")
  805. def get_balance_adjustments_summary(self) -> Dict[str, Any]:
  806. """Get summary of all balance adjustments from DB."""
  807. adjustments = self._fetch_query("SELECT type, amount, timestamp FROM balance_adjustments ORDER BY timestamp ASC")
  808. if not adjustments:
  809. return {'total_deposits': 0.0, 'total_withdrawals': 0.0, 'net_adjustment': 0.0,
  810. 'adjustment_count': 0, 'last_adjustment': None}
  811. total_deposits = sum(adj['amount'] for adj in adjustments if adj['type'] == 'deposit')
  812. total_withdrawals = sum(adj['amount'] for adj in adjustments if adj['type'] == 'withdrawal') # Amounts stored positive
  813. net_adjustment = total_deposits - total_withdrawals
  814. return {
  815. 'total_deposits': total_deposits, 'total_withdrawals': total_withdrawals,
  816. 'net_adjustment': net_adjustment, 'adjustment_count': len(adjustments),
  817. 'last_adjustment': adjustments[-1]['timestamp'] if adjustments else None
  818. }
  819. def close_connection(self):
  820. """Close the SQLite database connection."""
  821. if self.conn:
  822. self.conn.close()
  823. logger.info("TradingStats SQLite connection closed.")
def __del__(self):
    """Ensure connection is closed when object is deleted.

    NOTE(review): runs during garbage collection, possibly on a partially
    constructed instance — relies on close_connection() tolerating that.
    """
    self.close_connection()
  827. # --- Order Table Management ---
def record_order_placed(self, symbol: str, side: str, order_type: str,
                        amount_requested: float, price: Optional[float] = None,
                        bot_order_ref_id: Optional[str] = None,
                        exchange_order_id: Optional[str] = None,
                        status: str = 'open',
                        parent_bot_order_ref_id: Optional[str] = None) -> Optional[int]:
    """Record a newly placed order in the 'orders' table. Returns the ID of the inserted order or None on failure.

    Side, type and status are lower-cased before insert so later queries
    can assume canonical case. Both timestamp columns are set to "now"
    (UTC ISO-8601). A uniqueness violation (duplicate bot_order_ref_id /
    exchange_order_id) is logged and reported as None rather than raised.
    """
    now_iso = datetime.now(timezone.utc).isoformat()
    query = """
        INSERT INTO orders (bot_order_ref_id, exchange_order_id, symbol, side, type,
                            amount_requested, price, status, timestamp_created, timestamp_updated, parent_bot_order_ref_id)
        VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
    """
    params = (bot_order_ref_id, exchange_order_id, symbol, side.lower(), order_type.lower(),
              amount_requested, price, status.lower(), now_iso, now_iso, parent_bot_order_ref_id)
    try:
        # Use a cursor directly (rather than a shared helper) so the new
        # row's primary key is available via lastrowid after commit.
        cur = self.conn.cursor()
        cur.execute(query, params)
        self.conn.commit()
        order_db_id = cur.lastrowid
        logger.info(f"Recorded order placed: ID {order_db_id}, Symbol {symbol}, Side {side}, Type {order_type}, Amount {amount_requested}, BotRef {bot_order_ref_id}, ExchID {exchange_order_id}")
        return order_db_id
    except sqlite3.IntegrityError as e:
        logger.error(f"Failed to record order due to IntegrityError (likely duplicate bot_order_ref_id '{bot_order_ref_id}' or exchange_order_id '{exchange_order_id}'): {e}")
        return None
    except Exception as e:
        logger.error(f"Failed to record order: {e}")
        return None
def update_order_status(self, order_db_id: Optional[int] = None, bot_order_ref_id: Optional[str] = None, exchange_order_id: Optional[str] = None,
                        new_status: Optional[str] = None, amount_filled_increment: Optional[float] = None, set_exchange_order_id: Optional[str] = None) -> bool:
    """Update an existing order's status and/or amount_filled. Identify order by order_db_id, bot_order_ref_id, or exchange_order_id.

    Args:
        order_db_id: Database ID to identify the order
        bot_order_ref_id: Bot's internal reference ID to identify the order
        exchange_order_id: Exchange's order ID to identify the order
        new_status: New status to set
        amount_filled_increment: Amount to add to current filled amount
        set_exchange_order_id: If provided, sets/updates the exchange_order_id field in the database

    Returns:
        True if the UPDATE ran (or there was nothing to change); False if
        no identifier was supplied or the query failed.
    """
    if not any([order_db_id, bot_order_ref_id, exchange_order_id]):
        logger.error("Must provide one of order_db_id, bot_order_ref_id, or exchange_order_id to update order.")
        return False
    now_iso = datetime.now(timezone.utc).isoformat()
    # SET clauses and their bind parameters are built up in lockstep;
    # the WHERE-clause parameter is appended last, just before execution.
    set_clauses = []
    params = []
    if new_status:
        set_clauses.append("status = ?")
        params.append(new_status.lower())  # statuses stored lower-case
    if set_exchange_order_id is not None:
        set_clauses.append("exchange_order_id = ?")
        params.append(set_exchange_order_id)
    current_amount_filled = 0.0
    # Identifier precedence: database id, then bot ref, then exchange id.
    identifier_clause = ""
    identifier_param = None
    if order_db_id:
        identifier_clause = "id = ?"
        identifier_param = order_db_id
    elif bot_order_ref_id:
        identifier_clause = "bot_order_ref_id = ?"
        identifier_param = bot_order_ref_id
    elif exchange_order_id:
        identifier_clause = "exchange_order_id = ?"
        identifier_param = exchange_order_id
    if amount_filled_increment is not None and amount_filled_increment > 0:
        # To correctly increment, we might need to fetch current filled amount first if DB doesn't support direct increment easily or atomically with other updates.
        # For simplicity here, assuming we can use SQL's increment if other fields are not changing, or we do it in two steps.
        # Let's assume we fetch first then update to be safe and clear.
        # NOTE(review): this fetch-then-write is not atomic; concurrent
        # updates to the same order could lose an increment — confirm
        # single-writer usage before relying on it.
        order_data = self._fetchone_query(f"SELECT amount_filled FROM orders WHERE {identifier_clause}", (identifier_param,))
        if order_data:
            current_amount_filled = order_data.get('amount_filled', 0.0)
        else:
            logger.warning(f"Order not found by {identifier_clause}={identifier_param} when trying to increment amount_filled.")
            # Potentially still update status if new_status is provided, but amount_filled won't be right.
            # For now, let's proceed with update if status is there.
        set_clauses.append("amount_filled = ?")
        params.append(current_amount_filled + amount_filled_increment)
    if not set_clauses:
        logger.info("No fields to update for order.")
        return True # Or False if an update was expected
    # Every successful update refreshes the modification timestamp.
    set_clauses.append("timestamp_updated = ?")
    params.append(now_iso)
    params.append(identifier_param) # Add identifier param at the end for WHERE clause
    query = f"UPDATE orders SET { ', '.join(set_clauses) } WHERE {identifier_clause}"
    try:
        self._execute_query(query, params)
        log_msg = f"Updated order ({identifier_clause}={identifier_param}): Status to '{new_status or 'N/A'}', Filled increment {amount_filled_increment or 0.0}"
        if set_exchange_order_id is not None:
            log_msg += f", Exchange ID set to '{set_exchange_order_id}'"
        logger.info(log_msg)
        return True
    except Exception as e:
        logger.error(f"Failed to update order ({identifier_clause}={identifier_param}): {e}")
        return False
  921. def get_order_by_db_id(self, order_db_id: int) -> Optional[Dict[str, Any]]:
  922. """Fetch an order by its database primary key ID."""
  923. return self._fetchone_query("SELECT * FROM orders WHERE id = ?", (order_db_id,))
  924. def get_order_by_bot_ref_id(self, bot_order_ref_id: str) -> Optional[Dict[str, Any]]:
  925. """Fetch an order by the bot's internal reference ID."""
  926. return self._fetchone_query("SELECT * FROM orders WHERE bot_order_ref_id = ?", (bot_order_ref_id,))
  927. def get_order_by_exchange_id(self, exchange_order_id: str) -> Optional[Dict[str, Any]]:
  928. """Fetch an order by the exchange's order ID."""
  929. return self._fetchone_query("SELECT * FROM orders WHERE exchange_order_id = ?", (exchange_order_id,))
  930. def get_orders_by_status(self, status: str, order_type_filter: Optional[str] = None, parent_bot_order_ref_id: Optional[str] = None) -> List[Dict[str, Any]]:
  931. """Fetch all orders with a specific status, optionally filtering by order_type and parent_bot_order_ref_id."""
  932. query = "SELECT * FROM orders WHERE status = ?"
  933. params = [status.lower()]
  934. if order_type_filter:
  935. query += " AND type = ?"
  936. params.append(order_type_filter.lower())
  937. if parent_bot_order_ref_id:
  938. query += " AND parent_bot_order_ref_id = ?"
  939. params.append(parent_bot_order_ref_id)
  940. query += " ORDER BY timestamp_created ASC"
  941. return self._fetch_query(query, tuple(params))
  942. def cancel_linked_orders(self, parent_bot_order_ref_id: str, new_status: str = 'cancelled_parent_filled') -> int:
  943. """Cancel all orders linked to a parent order (e.g., pending stop losses when parent order fills or gets cancelled).
  944. Returns the number of orders that were cancelled."""
  945. linked_orders = self.get_orders_by_status('pending_trigger', parent_bot_order_ref_id=parent_bot_order_ref_id)
  946. cancelled_count = 0
  947. for order in linked_orders:
  948. order_db_id = order.get('id')
  949. if order_db_id:
  950. success = self.update_order_status(order_db_id=order_db_id, new_status=new_status)
  951. if success:
  952. cancelled_count += 1
  953. logger.info(f"Cancelled linked order ID {order_db_id} (parent: {parent_bot_order_ref_id}) -> status: {new_status}")
  954. return cancelled_count
  955. def cancel_pending_stop_losses_by_symbol(self, symbol: str, new_status: str = 'cancelled_position_closed') -> int:
  956. """Cancel all pending stop loss orders for a specific symbol (when position is closed).
  957. Returns the number of stop loss orders that were cancelled."""
  958. query = "SELECT * FROM orders WHERE symbol = ? AND status = 'pending_trigger' AND type = 'stop_limit_trigger'"
  959. pending_stop_losses = self._fetch_query(query, (symbol,))
  960. cancelled_count = 0
  961. for order in pending_stop_losses:
  962. order_db_id = order.get('id')
  963. if order_db_id:
  964. success = self.update_order_status(order_db_id=order_db_id, new_status=new_status)
  965. if success:
  966. cancelled_count += 1
  967. logger.info(f"Cancelled pending SL order ID {order_db_id} for {symbol} -> status: {new_status}")
  968. return cancelled_count
  969. def get_order_cleanup_summary(self) -> Dict[str, Any]:
  970. """Get summary of order cleanup actions for monitoring and debugging."""
  971. try:
  972. # Get counts of different cancellation types
  973. cleanup_stats = {}
  974. cancellation_types = [
  975. 'cancelled_parent_cancelled',
  976. 'cancelled_parent_disappeared',
  977. 'cancelled_manual_exit',
  978. 'cancelled_auto_exit',
  979. 'cancelled_no_position',
  980. 'cancelled_external_position_close',
  981. 'cancelled_orphaned_no_position',
  982. 'cancelled_externally',
  983. 'immediately_executed_on_activation',
  984. 'activation_execution_failed',
  985. 'activation_execution_error'
  986. ]
  987. for cancel_type in cancellation_types:
  988. count_result = self._fetchone_query(
  989. "SELECT COUNT(*) as count FROM orders WHERE status = ?",
  990. (cancel_type,)
  991. )
  992. cleanup_stats[cancel_type] = count_result['count'] if count_result else 0
  993. # Get currently pending stop losses
  994. pending_sls = self.get_orders_by_status('pending_trigger', 'stop_limit_trigger')
  995. cleanup_stats['currently_pending_stop_losses'] = len(pending_sls)
  996. # Get total orders in various states
  997. active_orders = self._fetchone_query(
  998. "SELECT COUNT(*) as count FROM orders WHERE status IN ('open', 'submitted', 'partially_filled')",
  999. ()
  1000. )
  1001. cleanup_stats['currently_active_orders'] = active_orders['count'] if active_orders else 0
  1002. return cleanup_stats
  1003. except Exception as e:
  1004. logger.error(f"Error getting order cleanup summary: {e}")
  1005. return {}
  1006. def get_external_activity_summary(self, days: int = 7) -> Dict[str, Any]:
  1007. """Get summary of external activity (trades and cancellations) over the last N days."""
  1008. try:
  1009. from datetime import timedelta
  1010. cutoff_date = (datetime.now(timezone.utc) - timedelta(days=days)).isoformat()
  1011. # External trades
  1012. external_trades = self._fetch_query(
  1013. "SELECT COUNT(*) as count, side FROM trades WHERE trade_type = 'external' AND timestamp >= ? GROUP BY side",
  1014. (cutoff_date,)
  1015. )
  1016. external_trade_summary = {
  1017. 'external_buy_trades': 0,
  1018. 'external_sell_trades': 0,
  1019. 'total_external_trades': 0
  1020. }
  1021. for trade_group in external_trades:
  1022. side = trade_group['side']
  1023. count = trade_group['count']
  1024. external_trade_summary['total_external_trades'] += count
  1025. if side == 'buy':
  1026. external_trade_summary['external_buy_trades'] = count
  1027. elif side == 'sell':
  1028. external_trade_summary['external_sell_trades'] = count
  1029. # External cancellations
  1030. external_cancellations = self._fetchone_query(
  1031. "SELECT COUNT(*) as count FROM orders WHERE status = 'cancelled_externally' AND timestamp_updated >= ?",
  1032. (cutoff_date,)
  1033. )
  1034. external_trade_summary['external_cancellations'] = external_cancellations['count'] if external_cancellations else 0
  1035. # Cleanup actions
  1036. cleanup_cancellations = self._fetchone_query(
  1037. """SELECT COUNT(*) as count FROM orders
  1038. WHERE status LIKE 'cancelled_%'
  1039. AND status != 'cancelled_externally'
  1040. AND timestamp_updated >= ?""",
  1041. (cutoff_date,)
  1042. )
  1043. external_trade_summary['cleanup_cancellations'] = cleanup_cancellations['count'] if cleanup_cancellations else 0
  1044. external_trade_summary['period_days'] = days
  1045. return external_trade_summary
  1046. except Exception as e:
  1047. logger.error(f"Error getting external activity summary: {e}")
  1048. return {'period_days': days, 'total_external_trades': 0, 'external_cancellations': 0}
  1049. # --- End Order Table Management ---
  1050. # =============================================================================
  1051. # TRADE LIFECYCLE MANAGEMENT - PHASE 4: UNIFIED TRADES TABLE
  1052. # =============================================================================
def create_trade_lifecycle(self, symbol: str, side: str, entry_order_id: Optional[str] = None,
                           stop_loss_price: Optional[float] = None, take_profit_price: Optional[float] = None,
                           trade_type: str = 'manual') -> Optional[str]:
    """Create a new trade lifecycle when an entry order is placed.

    Inserts a placeholder row (amount/price/value = 0, status 'pending',
    position_side 'flat') keyed by a fresh UUID; later fills update the
    row in place. Returns the lifecycle UUID string, or None on failure.
    """
    try:
        lifecycle_id = str(uuid.uuid4())
        query = """
            INSERT INTO trades (
                symbol, side, amount, price, value, trade_type, timestamp,
                status, trade_lifecycle_id, position_side, entry_order_id,
                stop_loss_price, take_profit_price, updated_at
            ) VALUES (?, ?, 0, 0, 0, ?, ?, 'pending', ?, 'flat', ?, ?, ?, ?)
        """
        timestamp = datetime.now(timezone.utc).isoformat()
        params = (symbol, side.lower(), trade_type, timestamp, lifecycle_id,
                  entry_order_id, stop_loss_price, take_profit_price, timestamp)
        self._execute_query(query, params)
        logger.info(f"📊 Created trade lifecycle {lifecycle_id}: {side.upper()} {symbol} (pending)")
        return lifecycle_id
    except Exception as e:
        logger.error(f"❌ Error creating trade lifecycle: {e}")
        return None
  1075. def update_trade_position_opened(self, lifecycle_id: str, entry_price: float,
  1076. entry_amount: float, exchange_fill_id: str) -> bool:
  1077. """Update trade when position is opened (entry order filled)."""
  1078. try:
  1079. query = """
  1080. UPDATE trades
  1081. SET status = 'position_opened',
  1082. amount = ?,
  1083. price = ?,
  1084. value = ?,
  1085. entry_price = ?,
  1086. current_position_size = ?,
  1087. position_side = CASE
  1088. WHEN side = 'buy' THEN 'long'
  1089. WHEN side = 'sell' THEN 'short'
  1090. ELSE position_side
  1091. END,
  1092. exchange_fill_id = ?,
  1093. position_opened_at = ?,
  1094. updated_at = ?
  1095. WHERE trade_lifecycle_id = ? AND status = 'pending'
  1096. """
  1097. timestamp = datetime.now(timezone.utc).isoformat()
  1098. value = entry_amount * entry_price
  1099. params = (entry_amount, entry_price, value, entry_price, entry_amount,
  1100. exchange_fill_id, timestamp, timestamp, lifecycle_id)
  1101. self._execute_query(query, params)
  1102. logger.info(f"📈 Trade lifecycle {lifecycle_id} position opened: {entry_amount} @ ${entry_price:.2f}")
  1103. return True
  1104. except Exception as e:
  1105. logger.error(f"❌ Error updating trade position opened: {e}")
  1106. return False
  1107. def update_trade_position_closed(self, lifecycle_id: str, exit_price: float,
  1108. realized_pnl: float, exchange_fill_id: str) -> bool:
  1109. """Update trade when position is fully closed."""
  1110. try:
  1111. query = """
  1112. UPDATE trades
  1113. SET status = 'position_closed',
  1114. current_position_size = 0,
  1115. position_side = 'flat',
  1116. realized_pnl = ?,
  1117. position_closed_at = ?,
  1118. updated_at = ?
  1119. WHERE trade_lifecycle_id = ? AND status = 'position_opened'
  1120. """
  1121. timestamp = datetime.now(timezone.utc).isoformat()
  1122. params = (realized_pnl, timestamp, timestamp, lifecycle_id)
  1123. self._execute_query(query, params)
  1124. pnl_emoji = "🟢" if realized_pnl >= 0 else "🔴"
  1125. logger.info(f"{pnl_emoji} Trade lifecycle {lifecycle_id} position closed: P&L ${realized_pnl:.2f}")
  1126. return True
  1127. except Exception as e:
  1128. logger.error(f"❌ Error updating trade position closed: {e}")
  1129. return False
  1130. def update_trade_cancelled(self, lifecycle_id: str, reason: str = "order_cancelled") -> bool:
  1131. """Update trade when entry order is cancelled (never opened)."""
  1132. try:
  1133. query = """
  1134. UPDATE trades
  1135. SET status = 'cancelled',
  1136. notes = ?,
  1137. updated_at = ?
  1138. WHERE trade_lifecycle_id = ? AND status = 'pending'
  1139. """
  1140. timestamp = datetime.now(timezone.utc).isoformat()
  1141. params = (f"Cancelled: {reason}", timestamp, lifecycle_id)
  1142. self._execute_query(query, params)
  1143. logger.info(f"❌ Trade lifecycle {lifecycle_id} cancelled: {reason}")
  1144. return True
  1145. except Exception as e:
  1146. logger.error(f"❌ Error updating trade cancelled: {e}")
  1147. return False
  1148. def link_stop_loss_to_trade(self, lifecycle_id: str, stop_loss_order_id: str,
  1149. stop_loss_price: float) -> bool:
  1150. """Link a stop loss order to a trade lifecycle."""
  1151. try:
  1152. query = """
  1153. UPDATE trades
  1154. SET stop_loss_order_id = ?,
  1155. stop_loss_price = ?,
  1156. updated_at = ?
  1157. WHERE trade_lifecycle_id = ? AND status = 'position_opened'
  1158. """
  1159. timestamp = datetime.now(timezone.utc).isoformat()
  1160. params = (stop_loss_order_id, stop_loss_price, timestamp, lifecycle_id)
  1161. self._execute_query(query, params)
  1162. logger.info(f"🛑 Linked stop loss order {stop_loss_order_id} (${stop_loss_price:.2f}) to trade {lifecycle_id}")
  1163. return True
  1164. except Exception as e:
  1165. logger.error(f"❌ Error linking stop loss to trade: {e}")
  1166. return False
  1167. def link_take_profit_to_trade(self, lifecycle_id: str, take_profit_order_id: str,
  1168. take_profit_price: float) -> bool:
  1169. """Link a take profit order to a trade lifecycle."""
  1170. try:
  1171. query = """
  1172. UPDATE trades
  1173. SET take_profit_order_id = ?,
  1174. take_profit_price = ?,
  1175. updated_at = ?
  1176. WHERE trade_lifecycle_id = ? AND status = 'position_opened'
  1177. """
  1178. timestamp = datetime.now(timezone.utc).isoformat()
  1179. params = (take_profit_order_id, take_profit_price, timestamp, lifecycle_id)
  1180. self._execute_query(query, params)
  1181. logger.info(f"🎯 Linked take profit order {take_profit_order_id} (${take_profit_price:.2f}) to trade {lifecycle_id}")
  1182. return True
  1183. except Exception as e:
  1184. logger.error(f"❌ Error linking take profit to trade: {e}")
  1185. return False
  1186. def get_trade_by_lifecycle_id(self, lifecycle_id: str) -> Optional[Dict[str, Any]]:
  1187. """Get trade by lifecycle ID."""
  1188. query = "SELECT * FROM trades WHERE trade_lifecycle_id = ?"
  1189. return self._fetchone_query(query, (lifecycle_id,))
  1190. def get_trade_by_symbol_and_status(self, symbol: str, status: str = 'position_opened') -> Optional[Dict[str, Any]]:
  1191. """Get trade by symbol and status."""
  1192. query = "SELECT * FROM trades WHERE symbol = ? AND status = ? ORDER BY updated_at DESC LIMIT 1"
  1193. return self._fetchone_query(query, (symbol, status))
  1194. def get_open_positions(self, symbol: Optional[str] = None) -> List[Dict[str, Any]]:
  1195. """Get all open positions, optionally filtered by symbol."""
  1196. if symbol:
  1197. query = "SELECT * FROM trades WHERE status = 'position_opened' AND symbol = ? ORDER BY position_opened_at DESC"
  1198. return self._fetch_query(query, (symbol,))
  1199. else:
  1200. query = "SELECT * FROM trades WHERE status = 'position_opened' ORDER BY position_opened_at DESC"
  1201. return self._fetch_query(query)
  1202. def get_trades_by_status(self, status: str, limit: int = 50) -> List[Dict[str, Any]]:
  1203. """Get trades by status."""
  1204. query = "SELECT * FROM trades WHERE status = ? ORDER BY updated_at DESC LIMIT ?"
  1205. return self._fetch_query(query, (status, limit))
  1206. def get_lifecycle_by_entry_order_id(self, entry_exchange_order_id: str, status: Optional[str] = None) -> Optional[Dict[str, Any]]:
  1207. """Get a trade lifecycle by its entry_order_id (exchange ID) and optionally by status."""
  1208. if status:
  1209. query = "SELECT * FROM trades WHERE entry_order_id = ? AND status = ? LIMIT 1"
  1210. params = (entry_exchange_order_id, status)
  1211. else:
  1212. query = "SELECT * FROM trades WHERE entry_order_id = ? LIMIT 1"
  1213. params = (entry_exchange_order_id,)
  1214. return self._fetchone_query(query, params)
  1215. def get_lifecycle_by_sl_order_id(self, sl_exchange_order_id: str, status: str = 'position_opened') -> Optional[Dict[str, Any]]:
  1216. """Get an active trade lifecycle by its stop_loss_order_id (exchange ID)."""
  1217. query = "SELECT * FROM trades WHERE stop_loss_order_id = ? AND status = ? LIMIT 1"
  1218. return self._fetchone_query(query, (sl_exchange_order_id, status))
  1219. def get_lifecycle_by_tp_order_id(self, tp_exchange_order_id: str, status: str = 'position_opened') -> Optional[Dict[str, Any]]:
  1220. """Get an active trade lifecycle by its take_profit_order_id (exchange ID)."""
  1221. query = "SELECT * FROM trades WHERE take_profit_order_id = ? AND status = ? LIMIT 1"
  1222. return self._fetchone_query(query, (tp_exchange_order_id, status))
  1223. def get_pending_stop_loss_activations(self) -> List[Dict[str, Any]]:
  1224. """Get open positions that need stop loss activation."""
  1225. query = """
  1226. SELECT * FROM trades
  1227. WHERE status = 'position_opened'
  1228. AND stop_loss_price IS NOT NULL
  1229. AND stop_loss_order_id IS NULL
  1230. ORDER BY updated_at ASC
  1231. """
  1232. return self._fetch_query(query)
  1233. def cleanup_old_cancelled_trades(self, days_old: int = 7) -> int:
  1234. """Clean up old cancelled trades (optional - for housekeeping)."""
  1235. try:
  1236. cutoff_date = (datetime.now(timezone.utc) - timedelta(days=days_old)).isoformat()
  1237. # Count before deletion
  1238. count_query = """
  1239. SELECT COUNT(*) as count FROM trades
  1240. WHERE status = 'cancelled' AND updated_at < ?
  1241. """
  1242. count_result = self._fetchone_query(count_query, (cutoff_date,))
  1243. count_to_delete = count_result['count'] if count_result else 0
  1244. if count_to_delete > 0:
  1245. delete_query = """
  1246. DELETE FROM trades
  1247. WHERE status = 'cancelled' AND updated_at < ?
  1248. """
  1249. self._execute_query(delete_query, (cutoff_date,))
  1250. logger.info(f"🧹 Cleaned up {count_to_delete} old cancelled trades (older than {days_old} days)")
  1251. return count_to_delete
  1252. except Exception as e:
  1253. logger.error(f"❌ Error cleaning up old cancelled trades: {e}")
  1254. return 0
  1255. def confirm_position_with_exchange(self, symbol: str, exchange_position_size: float,
  1256. exchange_open_orders: List[Dict]) -> bool:
  1257. """🆕 PHASE 4: Confirm position status with exchange before updating status."""
  1258. try:
  1259. # Get current trade status
  1260. current_trade = self.get_trade_by_symbol_and_status(symbol, 'position_opened')
  1261. if not current_trade:
  1262. return True # No open position to confirm
  1263. lifecycle_id = current_trade['trade_lifecycle_id']
  1264. has_open_orders = len([o for o in exchange_open_orders if o.get('symbol') == symbol]) > 0
  1265. # Only close position if exchange confirms no position AND no pending orders
  1266. if abs(exchange_position_size) < 1e-8 and not has_open_orders:
  1267. # Calculate realized P&L based on position side
  1268. position_side = current_trade['position_side']
  1269. entry_price_db = current_trade['entry_price'] # entry_price from db
  1270. # current_amount = current_trade['current_position_size'] # Not directly used for PNL calc here
  1271. # For a closed position, we need to calculate final P&L
  1272. # This would typically come from the closing trade, but for confirmation we estimate
  1273. estimated_pnl = current_trade.get('realized_pnl', 0) # Use existing realized_pnl if any
  1274. success = self.update_trade_position_closed(
  1275. lifecycle_id,
  1276. entry_price_db, # Using entry price from DB as estimate since position is confirmed closed
  1277. estimated_pnl,
  1278. "exchange_confirmed_closed"
  1279. )
  1280. if success:
  1281. logger.info(f"✅ Confirmed position closed for {symbol} with exchange")
  1282. return success
  1283. return True # Position still exists on exchange, no update needed
  1284. except Exception as e:
  1285. logger.error(f"❌ Error confirming position with exchange: {e}")
  1286. return False
  1287. def update_trade_market_data(self,
  1288. trade_lifecycle_id: str,
  1289. unrealized_pnl: Optional[float] = None,
  1290. mark_price: Optional[float] = None,
  1291. current_position_size: Optional[float] = None,
  1292. entry_price: Optional[float] = None,
  1293. liquidation_price: Optional[float] = None,
  1294. margin_used: Optional[float] = None,
  1295. leverage: Optional[float] = None,
  1296. position_value: Optional[float] = None) -> bool:
  1297. """Update market-related data for an open trade lifecycle.
  1298. Only updates fields for which a non-None value is provided.
  1299. """
  1300. try:
  1301. updates = []
  1302. params = []
  1303. if unrealized_pnl is not None:
  1304. updates.append("unrealized_pnl = ?")
  1305. params.append(unrealized_pnl)
  1306. if mark_price is not None:
  1307. updates.append("mark_price = ?")
  1308. params.append(mark_price)
  1309. if current_position_size is not None:
  1310. updates.append("current_position_size = ?")
  1311. params.append(current_position_size)
  1312. if entry_price is not None: # If exchange provides updated avg entry
  1313. updates.append("entry_price = ?")
  1314. params.append(entry_price)
  1315. if liquidation_price is not None:
  1316. updates.append("liquidation_price = ?")
  1317. params.append(liquidation_price)
  1318. if margin_used is not None:
  1319. updates.append("margin_used = ?")
  1320. params.append(margin_used)
  1321. if leverage is not None:
  1322. updates.append("leverage = ?")
  1323. params.append(leverage)
  1324. if position_value is not None:
  1325. updates.append("position_value = ?")
  1326. params.append(position_value)
  1327. if not updates:
  1328. logger.debug(f"No market data fields provided to update for lifecycle {trade_lifecycle_id}.")
  1329. return True # No update needed, not an error
  1330. timestamp = datetime.now(timezone.utc).isoformat()
  1331. updates.append("updated_at = ?")
  1332. params.append(timestamp)
  1333. set_clause = ", ".join(updates)
  1334. query = f"""
  1335. UPDATE trades
  1336. SET {set_clause}
  1337. WHERE trade_lifecycle_id = ? AND status = 'position_opened'
  1338. """
  1339. params.append(trade_lifecycle_id)
  1340. # Use the class's own connection self.conn
  1341. cursor = self.conn.cursor()
  1342. cursor.execute(query, tuple(params))
  1343. self.conn.commit()
  1344. updated_rows = cursor.rowcount
  1345. if updated_rows > 0:
  1346. logger.debug(f"💹 Updated market data for lifecycle {trade_lifecycle_id}. Fields: {updates}")
  1347. return True
  1348. else:
  1349. # This might happen if the lifecycle ID doesn't exist or status is not 'position_opened'
  1350. # logger.warning(f"⚠️ No trade found or not in 'position_opened' state for lifecycle {trade_lifecycle_id} to update market data.")
  1351. return False # Not necessarily an error
  1352. except Exception as e:
  1353. logger.error(f"❌ Error updating market data for trade lifecycle {trade_lifecycle_id}: {e}")
  1354. return False
  1355. # --- End Trade Lifecycle Management ---