"""Fetch MMA/UFC odds from BallDontLie API."""
import json
import os
import time
import urllib.error
import urllib.parse
import urllib.request
from pathlib import Path

API_KEY = os.environ.get('BALLDONTLIE_API_KEY')
if not API_KEY:
    raise RuntimeError("BALLDONTLIE_API_KEY is not set. Put it in the environment (recommended: .env / systemd EnvironmentFile).")
BASE_URL = 'https://api.balldontlie.io/mma/v1'
DATA_DIR = Path('/var/www/html/eventheodds/data/betting')

def make_request(endpoint, params=None):
    """GET an endpoint from the BallDontLie MMA API and return parsed JSON, or None on failure."""
    url = f'{BASE_URL}/{endpoint}'
    if params:
        url += '?' + urllib.parse.urlencode(params, doseq=True)
    req = urllib.request.Request(url, headers={'Authorization': API_KEY})
    try:
        with urllib.request.urlopen(req, timeout=30) as resp:
            return json.loads(resp.read().decode())
    except (urllib.error.URLError, json.JSONDecodeError) as e:
        print(f'Error requesting {endpoint}: {e}')
        return None

def fetch_mma_odds():
    """Fetch MMA events, fights and odds."""
    data_file = DATA_DIR / 'mma_historical.json'
    
    # Load existing data
    existing = []
    if data_file.exists():
        with open(data_file) as f:
            existing = json.load(f)
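    # Track fight IDs already stored so re-runs only append new fights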
    existing_ids = {d.get('fight_id') for d in existing}
    
    # Get events
    print('Fetching MMA events...')
    events = []
    cursor = None
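    # Page through events with the API's cursor-based pagination (meta.next_cursor)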
    while True:
        params = {'per_page': 100}
        if cursor:
            params['cursor'] = cursor
        resp = make_request('events', params)
        if not resp or not resp.get('data'):
            break
        events.extend(resp['data'])
        cursor = resp.get('meta', {}).get('next_cursor')
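        # Stop when there are no more pages or we have collected enough events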
        if not cursor or len(events) >= 200:
            break
    
    print(f'Found {len(events)} events')
    
    new_fights = []
    for event in events[:50]:  # Process only the first 50 events returned
        event_id = event['id']
        event_name = event.get('name', 'Unknown')
        event_date = (event.get('date') or '')[:10]  # Keep only the YYYY-MM-DD part
        
        # Get odds for event
        odds_resp = make_request('odds', {'event_id': event_id, 'per_page': 100})
        if not odds_resp or not odds_resp.get('data'):
            continue
            
        # Group odds by fight
        fight_odds = {}
        for odd in odds_resp['data']:
            fight_id = odd.get('fight_id')
            if not fight_id or fight_id in existing_ids:
                continue
            if fight_id not in fight_odds:
                fight_odds[fight_id] = {
                    'fight_id': fight_id,
                    'event_id': event_id,
                    'event': event_name,
                    'date': event_date,
                    'fighter1': (odd.get('fighter1') or {}).get('name', 'Unknown'),
                    'fighter2': (odd.get('fighter2') or {}).get('name', 'Unknown'),
                    'odds': {
                        'source': 'balldontlie',
                        'sportsbooks': {}
                    }
                }
            
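            # Record this sportsbook's line for both fighters under its vendor key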
            vendor = odd.get('vendor', 'unknown')
            fight_odds[fight_id]['odds']['sportsbooks'][vendor] = {
                'fighter1_odds': odd.get('fighter1_odds'),
                'fighter2_odds': odd.get('fighter2_odds')
            }
        
        new_fights.extend(fight_odds.values())
        print(f'  {event_name}: {len(fight_odds)} new fights with odds')
        time.sleep(0.5)  # Rate limit
    
    # Merge and save (write to a temp file first so a failed run can't truncate the archive)
    all_data = existing + new_fights
    tmp_file = data_file.with_name(data_file.name + '.tmp')
    with open(tmp_file, 'w') as f:
        json.dump(all_data, f, indent=2)
    tmp_file.replace(data_file)
    
    print('\n=== MMA ODDS SUMMARY ===')
    print(f'Existing: {len(existing)}')
    print(f'New: {len(new_fights)}')
    print(f'Total: {len(all_data)}')
    return len(new_fights)

if __name__ == '__main__':
    fetch_mma_odds()
