#!/usr/bin/env python3
"""
Run Auto Hedge-Mart V4 backtest on BTCUSD tick data.

Resamples tick data (bid/ask CSV) into M5 candles, then runs
the hedge-mart recovery grid strategy.

Usage:
    python run_btc_backtest.py                     # full dataset, M5
    python run_btc_backtest.py --max-rows=1000000  # first 1M ticks
    python run_btc_backtest.py --tf=15min          # 15-minute candles
"""

import sys
import os

# Ensure we're in the project dir
os.chdir(os.path.dirname(os.path.abspath(__file__)))

import pandas as pd
import numpy as np
from datetime import datetime

from settings import (
    SYMBOL, INITIAL_BALANCE, COMMISSION_PERCENT, SLIPPAGE_PIPS,
    ENTRY_TP_PIPS, ENTRY_SL_PIPS, RECOVERY_GRID_STEPS, RECOVERY_TPS,
    CRYPTO_PAIRS, RECOVERY_PROFIT_TARGET, THREAD_PROFIT_TARGET,
    MAX_INITIAL_ORDERS, FIBO_BARS_BACK, RECOVERY_FIBO_ZONE,
    ADAPTIVE_GRID, ADAPTIVE_ATR_BARS, REFERENCE_ATR,
    ADAPTIVE_MIN_SCALE, ADAPTIVE_MAX_SCALE,
)
from data_loader import DataLoader, OHLCV
from backtest import BacktestEngine
from utils import setup_logging


# Tick CSV consumed by load_ticks_to_candles; resolved relative to this
# script's directory (the os.chdir above guarantees that).
CSV_PATH = "BTCUSD.csv"

def load_ticks_to_candles(csv_path: str, timeframe: str = "5min",
                          max_rows: "int | None" = None) -> DataLoader:
    """Load a bid/ask tick CSV and resample it into OHLCV candles.

    The CSV is expected to contain the columns "Timestamp" (format
    ``YYYYMMDD HH:MM:SS:mmm``), "Bid price", "Ask price" and "Bid volume".

    Args:
        csv_path: Path to the tick CSV file.
        timeframe: pandas resample rule for the candle size (e.g. "5min").
        max_rows: If truthy, read only the first N ticks; None (or 0) reads
            the whole file, matching the original truthiness behavior.

    Returns:
        A DataLoader for SYMBOL whose ``data`` holds the resampled candles.
    """
    print(f"Loading tick data from {csv_path}...")

    # pandas treats nrows=None as "read everything", so no conditional
    # kwargs dict is needed.
    df = pd.read_csv(csv_path, nrows=max_rows if max_rows else None)
    print(f"Loaded {len(df):,} ticks")

    # Parse timestamps like "20250622 00:00:00:299" — swap the final colon
    # (before the milliseconds) for a dot so the %f directive can parse it.
    df["datetime"] = pd.to_datetime(
        df["Timestamp"].str.replace(r":(\d{3})$", r".\1", regex=True),
        format="%Y%m%d %H:%M:%S.%f"
    )

    # Use the bid/ask midpoint as the candle price source.
    df["mid"] = (df["Bid price"] + df["Ask price"]) / 2.0

    # Resample ticks into candles; volume is the summed bid volume.
    df = df.set_index("datetime")
    ohlcv = df["mid"].resample(timeframe).ohlc()
    ohlcv.columns = ["open", "high", "low", "close"]
    ohlcv["volume"] = df["Bid volume"].resample(timeframe).sum()
    ohlcv = ohlcv.dropna()  # drop periods that contained no ticks

    print(f"Resampled to {len(ohlcv):,} {timeframe} candles")
    print(f"Date range: {ohlcv.index[0]} to {ohlcv.index[-1]}")
    print(f"Price range: ${ohlcv['low'].min():,.1f} to ${ohlcv['high'].max():,.1f}")

    # Convert the frame into the project's DataLoader/OHLCV containers.
    loader = DataLoader(SYMBOL)
    loader.data = [
        OHLCV(
            timestamp=ts.to_pydatetime(),
            open=float(row["open"]),
            high=float(row["high"]),
            low=float(row["low"]),
            close=float(row["close"]),
            volume=float(row["volume"]),
        )
        for ts, row in ohlcv.iterrows()
    ]
    return loader


def print_params():
    """Print the auto-normalized strategy parameters for the active symbol."""
    pair_cfg = CRYPTO_PAIRS.get(SYMBOL, {})
    pip_size = pair_cfg.get("pip_size", 0.1)
    bar = "=" * 60

    print(f"\n{bar}")
    print(f"AUTO HEDGE-MART V4 — {SYMBOL}")
    print(bar)
    print(f"  Ref price:       ${pair_cfg.get('ref_price', 0):,.0f}")
    print(f"  Pip size:        {pip_size}")
    print(f"  Balance:         ${INITIAL_BALANCE:,.0f}")
    print(f"  Commission:      {COMMISSION_PERCENT}%")
    print(f"  Max orders:      {MAX_INITIAL_ORDERS}")
    print(f"  Fibo bars back:  {FIBO_BARS_BACK}")
    print(f"  Recovery Fibo:   {RECOVERY_FIBO_ZONE}")
    print("")
    print(f"  Entry TP:        {ENTRY_TP_PIPS} pips  =  ${ENTRY_TP_PIPS * pip_size:,.1f}")
    if ENTRY_SL_PIPS > 0:
        sl_text = f"{ENTRY_SL_PIPS} pips  =  ${ENTRY_SL_PIPS * pip_size:,.1f}"
    else:
        sl_text = "DISABLED"
    print(f"  Entry SL:        {sl_text}")
    print("")
    print(f"  Recovery grid ({len(RECOVERY_GRID_STEPS)} levels):")
    # Show at most the first five levels with a running cumulative distance.
    running = 0
    for level, step in enumerate(RECOVERY_GRID_STEPS[:5], start=1):
        running += step
        print(f"    L{level}: +{step} pips (cum {running})  =  ${step * pip_size:,.1f}  (cum ${running * pip_size:,.1f})")
    hidden = len(RECOVERY_GRID_STEPS) - 5
    if hidden > 0:
        print(f"    ... ({hidden} more levels)")
    print("")
    print(f"  Recovery TPs:    {RECOVERY_TPS}")
    print(f"  Recovery $:      ${RECOVERY_PROFIT_TARGET:.1f}")
    print(f"  Thread profit:   ${THREAD_PROFIT_TARGET:.1f}")
    print("")
    if ADAPTIVE_GRID:
        print("  ** ADAPTIVE GRID ENABLED **")
        print(f"  ATR lookback:    {ADAPTIVE_ATR_BARS} bars")
        print(f"  Reference ATR:   ${REFERENCE_ATR:.1f}")
        print(f"  Scale range:     {ADAPTIVE_MIN_SCALE:.1f}x - {ADAPTIVE_MAX_SCALE:.1f}x")
        print("  (Grid/TP auto-scale to current volatility)")
    print(f"{bar}\n")


def main():
    """Parse CLI flags, load tick data, run the backtest, save results.

    Recognized flags: ``--max-rows=N``, ``--tf=RULE``, ``-h``/``--help``.
    Unrecognized arguments are silently ignored (existing behavior).

    Returns:
        The engine's result object, or None when help was requested.
    """
    max_rows = None
    timeframe = "5min"

    # Minimal hand-rolled argument parsing (no argparse dependency).
    for arg in sys.argv[1:]:
        if arg.startswith("--max-rows="):
            max_rows = int(arg.split("=")[1])
        elif arg.startswith("--tf="):
            timeframe = arg.split("=")[1]
        elif arg in ("-h", "--help"):
            print(__doc__)
            return None

    setup_logging(console=True, file=True)

    # Show the auto-normalized parameters before running.
    print_params()

    # Load ticks and resample to the requested timeframe.
    data = load_ticks_to_candles(CSV_PATH, timeframe=timeframe, max_rows=max_rows)
    print(f"\nRunning backtest: {len(data)} candles, ${INITIAL_BALANCE:,.0f} balance\n")

    engine = BacktestEngine(
        symbol=SYMBOL,
        initial_balance=INITIAL_BALANCE,
        commission_percent=COMMISSION_PERCENT,
        slippage_pips=SLIPPAGE_PIPS,
    )

    def progress(current, total):
        # Rewrite the same console line in place via \r.
        pct = current / total * 100
        print(f"\rProgress: {pct:.1f}% ({current:,}/{total:,})", end="", flush=True)

    result = engine.run(data, progress_callback=progress)
    print()  # terminate the \r progress line

    result.print_summary()

    # Persist the full result for later analysis.
    import json
    with open("btc_backtest_result.json", "w") as f:
        json.dump(result.to_dict(), f, indent=2)
    print("\nResults saved to btc_backtest_result.json")

    return result


# Script entry point: run the full pipeline when executed directly.
if __name__ == "__main__":
    main()
