import pool from '../db';
import { getRedis } from './similarity';

/**
 * Cache event logging + daily metrics rollup.
 * Tracks hits, misses, tokens saved, and provides rollup for dashboards.
 */

export type EventType = 'hit' | 'miss' | 'expire' | 'warm';

/**
 * Record a single cache event in PostgreSQL and bump the matching
 * real-time Redis counters.
 *
 * Best-effort: any database/Redis failure is logged and swallowed so
 * metrics recording never breaks the caller's request path.
 *
 * @param params.cacheEntryId   query_cache row id, or null for misses
 * @param params.eventType      'hit' | 'miss' | 'expire' | 'warm'
 * @param params.agent          agent name the event is attributed to
 * @param params.normalizedQuery optional normalized query text
 * @param params.similarityScore optional match score (0 is a valid score)
 * @param params.tokensSaved    tokens avoided by the cache hit (defaults to 0)
 * @param params.latencyMs      optional request latency (0 ms is valid)
 */
export async function logEvent(params: {
  cacheEntryId: string | null;
  eventType: EventType;
  agent: string;
  normalizedQuery?: string;
  similarityScore?: number;
  tokensSaved?: number;
  latencyMs?: number;
}): Promise<void> {
  try {
    await pool.query(`
      INSERT INTO cache_events (cache_entry_id, event_type, agent, normalized_query, similarity_score, tokens_saved, latency_ms)
      VALUES ($1, $2, $3, $4, $5, $6, $7)
    `, [
      params.cacheEntryId,
      params.eventType,
      params.agent,
      params.normalizedQuery ?? null,
      // '??' (not '||'): a similarity score of exactly 0 must be stored, not NULLed.
      params.similarityScore ?? null,
      params.tokensSaved ?? 0,
      // '??' (not '||'): a 0 ms latency is a real measurement, not a missing one.
      params.latencyMs ?? null,
    ]);

    // Also increment Redis counters for real-time stats; these are drained
    // into cache_metrics_daily by flushDailyMetrics().
    const r = getRedis();
    const pipeline = r.pipeline();
    if (params.eventType === 'hit') {
      pipeline.incr(`qcache:metrics:${params.agent}:hits`);
      if (params.tokensSaved) {
        pipeline.incrby(`qcache:metrics:${params.agent}:saved`, params.tokensSaved);
      }
    } else if (params.eventType === 'miss') {
      pipeline.incr(`qcache:metrics:${params.agent}:misses`);
    }
    await pipeline.exec();
  } catch (err) {
    // Metrics are non-critical: log and continue rather than propagate.
    console.error('Log cache event error:', err);
  }
}

/**
 * Flush Redis counters into cache_metrics_daily table.
 * Called by the warming worker on each cycle.
 */
/**
 * Drain the per-agent Redis counters into the cache_metrics_daily table.
 * Called by the warming worker on each cycle.
 *
 * Counters are read-and-reset via GETSET so concurrent logEvent() increments
 * are never lost; the upsert then ADDS the drained deltas onto today's row,
 * while unique_queries / top_queries are recomputed from cache_events and
 * overwritten. Failures are logged per-agent and do not abort the loop.
 */
export async function flushDailyMetrics(): Promise<void> {
  const agents = ['sportsclaw', 'cryptoclaw', 'main'];
  const redis = getRedis();
  const today = new Date().toISOString().slice(0, 10);

  for (const agent of agents) {
    try {
      // Atomically read-and-reset the Redis counters (GETSET returns the
      // old value; each result entry is an [error, value] pair).
      const pipeline = redis.pipeline();
      pipeline.getset(`qcache:metrics:${agent}:hits`, '0');
      pipeline.getset(`qcache:metrics:${agent}:misses`, '0');
      pipeline.getset(`qcache:metrics:${agent}:saved`, '0');
      const results = await pipeline.exec();

      // GETSET yields null for keys that never existed; coerce to 0.
      const hits = parseInt((results?.[0]?.[1] as string) || '0', 10) || 0;
      const misses = parseInt((results?.[1]?.[1] as string) || '0', 10) || 0;
      const saved = parseInt((results?.[2]?.[1] as string) || '0', 10) || 0;

      // Nothing accumulated since the last flush — skip the DB work.
      if (hits === 0 && misses === 0 && saved === 0) continue;

      // Distinct queries seen today (all event types) for this agent.
      const uniqueResult = await pool.query(`
        SELECT COUNT(DISTINCT normalized_query) as cnt
        FROM cache_events
        WHERE agent = $1 AND created_at::date = $2::date
      `, [agent, today]);
      const uniqueQueries = parseInt(uniqueResult.rows[0]?.cnt ?? '0', 10);

      // Top 10 most-hit cached queries for today.
      const topResult = await pool.query(`
        SELECT qc.normalized_query, COUNT(*) as hits
        FROM cache_events ce
        JOIN query_cache qc ON ce.cache_entry_id = qc.id
        WHERE ce.agent = $1
          AND ce.event_type = 'hit'
          AND ce.created_at::date = $2::date
        GROUP BY qc.normalized_query
        ORDER BY hits DESC
        LIMIT 10
      `, [agent, today]);
      const topQueries = topResult.rows.map(row => ({
        query: row.normalized_query,
        hits: parseInt(row.hits, 10),
      }));

      // Accumulate counter deltas into today's row; snapshot columns
      // (unique_queries, top_queries) are simply replaced.
      await pool.query(`
        INSERT INTO cache_metrics_daily (date, agent, total_queries, cache_hits, cache_misses, tokens_saved, unique_queries, top_queries)
        VALUES ($1, $2, $3, $4, $5, $6, $7, $8)
        ON CONFLICT (date, agent) DO UPDATE SET
          total_queries = cache_metrics_daily.total_queries + EXCLUDED.total_queries,
          cache_hits = cache_metrics_daily.cache_hits + EXCLUDED.cache_hits,
          cache_misses = cache_metrics_daily.cache_misses + EXCLUDED.cache_misses,
          tokens_saved = cache_metrics_daily.tokens_saved + EXCLUDED.tokens_saved,
          unique_queries = EXCLUDED.unique_queries,
          top_queries = EXCLUDED.top_queries
      `, [today, agent, hits + misses, hits, misses, saved, uniqueQueries, JSON.stringify(topQueries)]);
    } catch (err) {
      console.error(`Flush metrics error for ${agent}:`, err);
    }
  }
}

/**
 * Prune expired cache entries from PostgreSQL
 */
/**
 * Delete query_cache rows whose expires_at timestamp has passed.
 *
 * Best-effort: a database failure is logged and reported as 0 deletions
 * rather than thrown.
 *
 * @returns the number of rows removed (0 if nothing expired or on error)
 */
export async function pruneExpired(): Promise<number> {
  try {
    const deleted = await pool.query(
      `DELETE FROM query_cache WHERE expires_at < NOW()`
    );
    return deleted.rowCount ?? 0;
  } catch (err) {
    console.error('Prune expired error:', err);
    return 0;
  }
}
