#!/usr/bin/env python3
"""
Recovery Depth Scanner — analyzes trade logs to find:
1. Max recovery depths per thread
2. Lot sizes at each depth
3. Notional exposure at each depth
4. What lot adjustments are needed to stay within leverage limits

Usage:
    python3 depth_scanner.py results/trades_XXX.csv [starting_balance] [leverage]
    python3 depth_scanner.py --latest                # use latest trade file
    python3 depth_scanner.py                         # no args: same as --latest
"""
import csv, sys, os, glob
from collections import defaultdict

# Run from the script's own directory so the relative "results/trades_*.csv"
# glob works no matter where the user invokes the script from.
os.chdir(os.path.dirname(os.path.abspath(__file__)))

def scan_trades(filepath, starting_balance=20000, max_leverage=10):
    trades = []
    with open(filepath) as f:
        reader = csv.DictReader(f)
        for row in reader:
            trades.append({
                'id': int(row['trade_id']),
                'side': row['side'],
                'lots': float(row['lots']),
                'entry_price': float(row['entry_price']),
                'exit_price': float(row['exit_price']),
                'entry_time': row['entry_time'],
                'exit_time': row['exit_time'],
                'profit': float(row['profit']),
                'thread': row.get('magic_thread', ''),
                'comment': row.get('comment', ''),
            })

    # Parse recovery level from comment
    def get_level(comment):
        if 'REC_L' in comment:
            try:
                part = comment.split('REC_L')[1]
                level = int(part.split('_')[0])
                return level
            except:
                return 0
        return 0

    # ---- Per-thread analysis ----
    thread_data = defaultdict(lambda: {
        'max_level': 0, 'trades': 0, 'max_lot': 0,
        'level_lots': defaultdict(float),  # level -> max lot at that level
        'level_notional': defaultdict(float),  # level -> max notional
        'first_time': '', 'last_time': '',
        'profit': 0, 'max_concurrent_lots': 0,
    })

    for t in trades:
        th = t['thread']
        level = get_level(t['comment'])
        td = thread_data[th]
        td['trades'] += 1
        td['profit'] += t['profit']
        td['max_level'] = max(td['max_level'], level)
        td['max_lot'] = max(td['max_lot'], t['lots'])
        if not td['first_time']:
            td['first_time'] = t['entry_time']
        td['last_time'] = t['exit_time']

        # Track max lot per level
        if t['lots'] > td['level_lots'][level]:
            td['level_lots'][level] = t['lots']
            td['level_notional'][level] = t['lots'] * t['entry_price']

    # ---- Global level analysis ----
    level_stats = defaultdict(lambda: {
        'count': 0, 'max_lot': 0, 'min_lot': 999,
        'total_lots': 0, 'max_notional': 0,
        'threads_reaching': set(), 'profits': [],
    })

    for t in trades:
        level = get_level(t['comment'])
        ls = level_stats[level]
        ls['count'] += 1
        ls['max_lot'] = max(ls['max_lot'], t['lots'])
        ls['min_lot'] = min(ls['min_lot'], t['lots'])
        ls['total_lots'] += t['lots']
        ls['max_notional'] = max(ls['max_notional'], t['lots'] * t['entry_price'])
        ls['threads_reaching'].add(t['thread'])
        ls['profits'].append(t['profit'])

    # ---- Find deepest threads ----
    deep_threads = sorted(thread_data.items(), key=lambda x: x[1]['max_level'], reverse=True)

    # ---- Print Report ----
    print("=" * 90)
    print(f"  RECOVERY DEPTH SCANNER")
    print(f"  File: {os.path.basename(filepath)}")
    print(f"  Trades: {len(trades):,} | Starting Balance: ${starting_balance:,.0f} | Max Leverage: {max_leverage}x")
    print("=" * 90)

    # Global level summary
    max_affordable_notional = starting_balance * max_leverage
    print(f"\n--- RECOVERY LEVEL SUMMARY ---")
    print(f"  Max affordable notional at {max_leverage}x on ${starting_balance:,.0f}: ${max_affordable_notional:,.0f}")
    print(f"  Max affordable lots at $100K BTC: {max_affordable_notional/100000:.2f}")
    print()
    print(f"  {'Level':>6} {'Trades':>7} {'Threads':>8} {'MaxLot':>10} {'AvgLot':>10} {'MaxNotional':>14} {'Exceeds?':>10}")
    print(f"  {'-'*70}")

    for level in sorted(level_stats.keys()):
        ls = level_stats[level]
        avg_lot = ls['total_lots'] / ls['count'] if ls['count'] > 0 else 0
        exceeds = "⚠️ YES" if ls['max_notional'] > max_affordable_notional else ""
        lbl = 'Entry' if level == 0 else f'L{level}'
        print(f"  {lbl:>6} {ls['count']:>7} {len(ls['threads_reaching']):>8} "
              f"{ls['max_lot']:>10.4f} {avg_lot:>10.4f} ${ls['max_notional']:>13,.0f} {exceeds:>10}")

    # Deepest threads detail
    print(f"\n--- TOP 15 DEEPEST THREADS ---")
    print(f"  {'Thread':>10} {'MaxLvl':>7} {'Trades':>7} {'MaxLot':>10} {'Profit':>12} {'Period'}")
    print(f"  {'-'*80}")
    for th, td in deep_threads[:15]:
        period = f"{td['first_time'][:10]} → {td['last_time'][:10]}"
        print(f"  {th:>10} {'L'+str(td['max_level']):>7} {td['trades']:>7} "
              f"{td['max_lot']:>10.4f} ${td['profit']:>11,.2f} {period}")

    # Per-level lot breakdown for deepest thread
    if deep_threads:
        th_id, td = deep_threads[0]
        print(f"\n--- DEEPEST THREAD #{th_id} — LEVEL-BY-LEVEL LOTS ---")
        print(f"  {'Level':>6} {'MaxLot':>10} {'Notional':>14} {'Fits {max_leverage}x?':>12}")
        for level in sorted(td['level_lots'].keys()):
            lot = td['level_lots'][level]
            notional = td['level_notional'][level]
            fits = "✅" if notional <= max_affordable_notional else f"❌ {notional/max_affordable_notional:.1f}x over"
            lbl = 'Entry' if level == 0 else f'L{level}'
            print(f"  {lbl:>6} {lot:>10.4f} ${notional:>13,.0f} {fits:>12}")

    # Lot sizing recommendation
    print(f"\n--- LOT SIZING RECOMMENDATION ---")
    max_level_seen = max(level_stats.keys()) if level_stats else 0
    if max_level_seen > 0:
        deepest_max_lot = level_stats[max_level_seen]['max_lot']
        deepest_notional = level_stats[max_level_seen]['max_notional']
        if deepest_notional > max_affordable_notional:
            scale_factor = max_affordable_notional / deepest_notional
            recommended_base = 0.02 * scale_factor
            print(f"  Deepest level: L{max_level_seen}")
            print(f"  Max lot at L{max_level_seen}: {deepest_max_lot:.4f} (${deepest_notional:,.0f} notional)")
            print(f"  Exceeds {max_leverage}x by: {deepest_notional/max_affordable_notional:.1f}x")
            print(f"  Scale factor needed: {scale_factor:.4f}")
            print(f"  Recommended base lot: {recommended_base:.4f} (down from 0.02)")
            print(f"  Or increase starting balance to: ${deepest_notional/max_leverage:,.0f}")
        else:
            print(f"  ✅ All recovery levels fit within {max_leverage}x leverage at ${starting_balance:,.0f}")
            print(f"  Max lot: {deepest_max_lot:.4f} at L{max_level_seen}")
    else:
        print(f"  No recovery levels detected")

    return {
        'max_level': max_level_seen,
        'level_stats': dict(level_stats),
        'deep_threads': deep_threads[:15],
    }


if __name__ == "__main__":
    if len(sys.argv) > 1 and sys.argv[1] == '--latest':
        files = sorted(glob.glob("results/trades_*.csv"), reverse=True)[:1]
        if not files:
            print("No trade files found")
            sys.exit(1)
        filepath = files[0]
    elif len(sys.argv) > 1:
        filepath = sys.argv[1]
    else:
        files = sorted(glob.glob("results/trades_*.csv"), reverse=True)[:1]
        if not files:
            print("No trade files found")
            sys.exit(1)
        filepath = files[0]

    balance = float(sys.argv[2]) if len(sys.argv) > 2 else 20000
    leverage = float(sys.argv[3]) if len(sys.argv) > 3 else 10

    scan_trades(filepath, balance, leverage)
