/**
 * aggregate-costs.ts — Nightly aggregation worker
 *
 * Rolls up rm_api_usage + rm_forecast_open_log into rm_daily_cost_summary.
 * Also computes rm_forecast_batch_summary from worker run patterns.
 *
 * Usage: npx tsx src/workers/aggregate-costs.ts [--date 2026-03-01] [--days 7]
 *
 * Cron: Run nightly at 1:00 AM ET
 */

import pool from '../db';

// --- CLI argument parsing ---
//   --date YYYY-MM-DD : aggregate exactly one specific date
//   --days N          : aggregate the last N days (default 1 = yesterday)
const args = process.argv.slice(2);
// `?? null` so a trailing bare "--date" yields null (falsy) rather than undefined.
const dateArg = args.includes('--date') ? (args[args.indexOf('--date') + 1] ?? null) : null;
// Explicit radix, and guard against a missing/non-numeric value after "--days":
// parseInt(undefined) is NaN, and a NaN loop bound would silently skip all days.
const parsedDays = args.includes('--days') ? parseInt(args[args.indexOf('--days') + 1] ?? '', 10) : 1;
const daysArg = Number.isInteger(parsedDays) && parsedDays > 0 ? parsedDays : 1;

/**
 * Load per-model token pricing (USD per million tokens) from rm_model_pricing.
 *
 * @returns Map of model name -> { input, output } rates. If the query fails
 *          (table missing, DB down), returns hard-coded fallback rates so the
 *          aggregation can still run with approximate costs.
 */
async function getModelPricing(): Promise<Record<string, { input: number; output: number }>> {
  try {
    const { rows } = await pool.query(
      'SELECT model_name, input_cost_per_m, output_cost_per_m FROM rm_model_pricing WHERE active = true'
    );
    const pricing: Record<string, { input: number; output: number }> = {};
    for (const row of rows) {
      // numeric columns come back as strings from node-postgres; parse explicitly.
      pricing[row.model_name] = {
        input: parseFloat(row.input_cost_per_m),
        output: parseFloat(row.output_cost_per_m),
      };
    }
    return pricing;
  } catch (err: unknown) {
    // Deliberate best-effort fallback, but don't swallow the error silently —
    // log it so stale/missing pricing data is visible in worker logs.
    console.warn('[aggregate] Failed to load model pricing from DB, using fallback defaults:', err);
    return {
      'anthropic/claude-opus-4-6': { input: 15, output: 75 },
      'grok-4-1-fast-reasoning': { input: 3, output: 15 },
    };
  }
}

/**
 * Estimate the dollar cost of one model call from its token counts.
 *
 * @param pricing      Per-model rates in USD per million tokens.
 * @param model        Model name used as the pricing lookup key.
 * @param inputTokens  Prompt tokens consumed.
 * @param outputTokens Completion tokens produced.
 * @returns Estimated cost in USD; unknown models use a default 3/15 rate.
 */
function estimateCost(
  pricing: Record<string, { input: number; output: number }>,
  model: string,
  inputTokens: number,
  outputTokens: number
): number {
  const rate = pricing[model] ?? { input: 3, output: 15 };
  const inputCost = (inputTokens / 1_000_000) * rate.input;
  const outputCost = (outputTokens / 1_000_000) * rate.output;
  return inputCost + outputCost;
}

/**
 * Aggregate one calendar day of rm_api_usage into the two summary tables.
 *
 * Step 1: roll up usage by (feature_type, league, model, provider) and upsert
 *         into rm_daily_cost_summary with a cost estimate per group.
 * Step 2: roll up forecast/team_props calls by (league, subcategory) into
 *         rm_forecast_batch_summary as batch runs.
 *
 * Idempotent: re-running for the same date overwrites via ON CONFLICT upserts,
 * so partial days can be re-aggregated safely.
 *
 * @param dateStr Date to aggregate, formatted YYYY-MM-DD.
 * @param pricing Per-model USD-per-million-token rates (see getModelPricing).
 */
async function aggregateDay(dateStr: string, pricing: Record<string, { input: number; output: number }>): Promise<void> {
  console.log(`[aggregate] Processing ${dateStr}...`);

  // 1. Roll up API usage by feature_type (category), league, model, provider
  const { rows: usageRows } = await pool.query(`
    SELECT
      category AS feature_type,
      league,
      model,
      provider,
      COUNT(*)::int AS total_calls,
      COALESCE(SUM(input_tokens), 0)::bigint AS input_tokens,
      COALESCE(SUM(output_tokens), 0)::bigint AS output_tokens,
      COALESCE(SUM(total_tokens), 0)::bigint AS total_tokens,
      COUNT(*) FILTER (WHERE success = false)::int AS error_count,
      ROUND(AVG(response_time_ms) FILTER (WHERE success = true))::int AS avg_response_ms
    FROM rm_api_usage
    WHERE created_at::date = $1::date
    GROUP BY category, league, model, provider
  `, [dateStr]);

  // Upsert each group into rm_daily_cost_summary. bigint columns arrive as
  // strings from node-postgres, hence Number(...) before the cost math.
  for (const row of usageRows) {
    const cost = estimateCost(pricing, row.model, Number(row.input_tokens), Number(row.output_tokens));
    await pool.query(`
      INSERT INTO rm_daily_cost_summary (date, feature_type, league, model, provider, total_calls, input_tokens, output_tokens, total_tokens, cost_estimate, error_count, avg_response_ms)
      VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10, $11, $12)
      ON CONFLICT (date, feature_type, league, model, provider) DO UPDATE SET
        total_calls = EXCLUDED.total_calls,
        input_tokens = EXCLUDED.input_tokens,
        output_tokens = EXCLUDED.output_tokens,
        total_tokens = EXCLUDED.total_tokens,
        cost_estimate = EXCLUDED.cost_estimate,
        error_count = EXCLUDED.error_count,
        avg_response_ms = EXCLUDED.avg_response_ms,
        created_at = NOW()
    `, [
      dateStr, row.feature_type, row.league || null, row.model, row.provider,
      row.total_calls, row.input_tokens, row.output_tokens, row.total_tokens,
      cost, row.error_count, row.avg_response_ms,
    ]);
  }

  // 2. Build forecast batch summary from usage patterns
  // Group forecast-category calls by league to create batch summaries.
  // duration_ms: multiply by 1000 BEFORE casting to int — the previous
  // `::int * 1000` rounded to whole seconds first, losing ms precision.
  const { rows: batchRows } = await pool.query(`
    SELECT
      league,
      subcategory,
      COUNT(*)::int AS total_forecasts,
      COALESCE(SUM(total_tokens), 0)::bigint AS total_tokens,
      COALESCE(SUM(input_tokens), 0)::bigint AS input_tokens,
      COALESCE(SUM(output_tokens), 0)::bigint AS output_tokens,
      COUNT(*) FILTER (WHERE success = false)::int AS error_count,
      ROUND(EXTRACT(EPOCH FROM (MAX(created_at) - MIN(created_at))) * 1000)::int AS duration_ms
    FROM rm_api_usage
    WHERE created_at::date = $1::date
      AND category IN ('forecast', 'team_props')
    GROUP BY league, subcategory
  `, [dateStr]);

  for (const row of batchRows) {
    // 'precomputed' subcategory rows come from the weather-report run; all
    // other forecast traffic is attributed to the morning batch.
    const batchType = row.subcategory === 'precomputed' ? 'weather_report' : 'morning_batch';
    // Batch cost is estimated at the opus rate regardless of actual model.
    const cost = estimateCost(pricing, 'anthropic/claude-opus-4-6', Number(row.input_tokens), Number(row.output_tokens));
    await pool.query(`
      INSERT INTO rm_forecast_batch_summary (date, batch_type, league, total_forecasts, total_tokens, total_cost, error_count, duration_ms)
      VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
      ON CONFLICT (date, batch_type, league) DO UPDATE SET
        total_forecasts = EXCLUDED.total_forecasts,
        total_tokens = EXCLUDED.total_tokens,
        total_cost = EXCLUDED.total_cost,
        error_count = EXCLUDED.error_count,
        duration_ms = EXCLUDED.duration_ms,
        created_at = NOW()
    `, [dateStr, batchType, row.league || null, row.total_forecasts, row.total_tokens, cost, row.error_count, row.duration_ms]);
  }

  console.log(`[aggregate] ${dateStr}: ${usageRows.length} usage groups, ${batchRows.length} batch groups`);
}

/**
 * Entry point: load pricing, then aggregate either the one --date given or
 * the last --days days plus today's partial data.
 *
 * The pool is closed in `finally` so the connection is released even when an
 * aggregation throws (previously pool.end() only ran on the success path).
 */
async function main(): Promise<void> {
  try {
    const pricing = await getModelPricing();
    console.log(`[aggregate] Loaded pricing for ${Object.keys(pricing).length} models`);

    if (dateArg) {
      // Process specific date
      await aggregateDay(dateArg, pricing);
    } else {
      // Process last N days (default 1 = yesterday).
      // NOTE(review): dates are derived in UTC via toISOString(), but the cron
      // header says 1:00 AM ET — confirm the intended day boundary matches the
      // created_at::date comparison used by aggregateDay.
      for (let i = 1; i <= daysArg; i++) {
        const d = new Date();
        d.setDate(d.getDate() - i);
        const dateStr = d.toISOString().split('T')[0];
        await aggregateDay(dateStr, pricing);
      }
      // Also process today (partial)
      const today = new Date().toISOString().split('T')[0];
      await aggregateDay(today, pricing);
    }

    console.log('[aggregate] Done');
  } finally {
    await pool.end();
  }
}

// Kick off the worker; any unhandled rejection is logged and the process
// exits non-zero so the cron scheduler can detect the failure.
void main().catch((err: unknown) => {
  console.error('[aggregate] Fatal error:', err);
  process.exit(1);
});
