import dotenv from 'dotenv';
dotenv.config({ path: __dirname + '/../../.env' });

import express from 'express';
import { lookupCache, storeInCache } from './cache-service';
import { closeRedis } from './similarity';

/**
 * Cache proxy server on :18790
 * Checks cache before forwarding to OpenClaw gateway at :18789.
 */

// Listen port for this proxy; falls back to 18790 when unset.
const PORT = parseInt(process.env.CACHE_PROXY_PORT || '18790');
// Upstream OpenClaw gateway base URL (no trailing slash expected).
const OPENCLAW_URL = process.env.OPENCLAW_GATEWAY_URL || 'http://127.0.0.1:18789';
// Bearer token forwarded to the gateway; empty string when not configured.
const OPENCLAW_TOKEN = process.env.OPENCLAW_GATEWAY_TOKEN || '';

const app = express();
// Parse JSON bodies; 1mb cap bounds memory per request.
app.use(express.json({ limit: '1mb' }));

// Health check — liveness probe, no cache or upstream involved.
app.get('/health', (_req, res) => {
  res.json({ status: 'ok', service: 'cache-proxy', timestamp: new Date().toISOString() });
});

// Main proxy endpoint
app.post('/v1/chat/completions', async (req, res) => {
  try {
    const body = req.body;
    const messages = body.messages || [];

    // Extract the last user message
    let lastUserMessage = '';
    for (let i = messages.length - 1; i >= 0; i--) {
      if (messages[i].role === 'user') {
        lastUserMessage = typeof messages[i].content === 'string'
          ? messages[i].content
          : (messages[i].content || []).map((c: any) => c.text || '').join(' ');
        break;
      }
    }

    if (!lastUserMessage) {
      return await forwardToOpenClaw(req, res, body);
    }

    // Determine agent from model name or header
    const agent = detectAgent(body.model || '', req.headers['x-agent'] as string);

    // Cache lookup
    const cacheResult = await lookupCache(lastUserMessage, agent);

    if (cacheResult.hit && cacheResult.response) {
      console.log(`[CACHE HIT] ${cacheResult.matchType} (${cacheResult.similarityScore.toFixed(2)}) agent=${agent} latency=${cacheResult.latencyMs}ms`);
      const cachedResponse = buildOpenAIResponse(cacheResult.response, body.model || 'cached', true);
      res.json(cachedResponse);
      return;
    }

    // Cache miss — forward to OpenClaw
    console.log(`[CACHE MISS] agent=${agent} query="${lastUserMessage.slice(0, 80)}..."`);

    const openClawResponse = await callOpenClaw(body);

    if (!openClawResponse.ok) {
      const errBody = await openClawResponse.text();
      res.status(openClawResponse.status).send(errBody);
      return;
    }

    const responseData: any = await openClawResponse.json();

    // Extract assistant response text
    const assistantContent = extractAssistantContent(responseData);
    const usage = responseData.usage || {};

    // Store in cache (async, don't block response)
    if (assistantContent && cacheResult.normalized) {
      storeInCache(cacheResult.normalized, agent, assistantContent, {
        modelUsed: responseData.model || body.model,
        tokensIn: usage.prompt_tokens,
        tokensOut: usage.completion_tokens,
        source: 'live',
      }).catch(err => console.error('Cache store error:', err));
    }

    res.json(responseData);
  } catch (err: any) {
    console.error('Proxy error:', err);
    try {
      await forwardToOpenClaw(req, res, req.body);
    } catch (fwdErr: any) {
      res.status(500).json({ error: 'Proxy error', message: err.message });
    }
  }
});

/**
 * Resolve which agent a request targets.
 *
 * An explicit (non-empty) `x-agent` header value wins outright; otherwise
 * the model name is scanned case-insensitively for a known agent
 * substring. Unrecognized models default to 'sportsclaw'.
 */
function detectAgent(model: string, headerAgent?: string): string {
  if (headerAgent) return headerAgent;
  const normalized = model.toLowerCase();
  for (const candidate of ['sportsclaw', 'cryptoclaw']) {
    if (normalized.includes(candidate)) return candidate;
  }
  return 'sportsclaw';
}

/**
 * Pass a request body straight through to the OpenClaw gateway and relay
 * the upstream status, headers, and body back to the client.
 *
 * Encoding/length headers are dropped because the body is re-serialized
 * by Express and those values would no longer be accurate.
 */
async function forwardToOpenClaw(req: express.Request, res: express.Response, body: any): Promise<void> {
  const upstream = await callOpenClaw(body);
  const upstreamBody = await upstream.text();
  const skipped = new Set(['content-encoding', 'transfer-encoding', 'content-length']);

  res.status(upstream.status);
  upstream.headers.forEach((value, key) => {
    if (!skipped.has(key.toLowerCase())) {
      res.setHeader(key, value);
    }
  });
  res.send(upstreamBody);
}

/**
 * POST a chat-completion payload to the OpenClaw gateway.
 *
 * Authenticates with the configured bearer token (which may be an empty
 * string when OPENCLAW_GATEWAY_TOKEN is unset). Returns the raw fetch
 * Response; callers decide how to consume status/body.
 */
async function callOpenClaw(body: any): Promise<Response> {
  const endpoint = `${OPENCLAW_URL}/v1/chat/completions`;
  const headers = {
    'Content-Type': 'application/json',
    'Authorization': `Bearer ${OPENCLAW_TOKEN}`,
  };
  return fetch(endpoint, { method: 'POST', headers, body: JSON.stringify(body) });
}

/**
 * Pull the assistant's text out of an OpenAI-style completion payload.
 *
 * Inspects the first choice's `message` (or streaming `delta`) and
 * returns its content only when it is a plain string; any missing or
 * non-string shape yields null.
 */
function extractAssistantContent(responseData: any): string | null {
  try {
    const firstChoice = (responseData.choices || [])[0];
    if (!firstChoice) return null;
    const message = firstChoice.message || firstChoice.delta;
    if (!message || typeof message.content !== 'string') return null;
    return message.content;
  } catch {
    // Defensive: malformed payloads must never crash the proxy.
    return null;
  }
}

/**
 * Wrap text in an OpenAI chat-completion-shaped response object.
 *
 * Token counts are zeroed (nothing was generated upstream for this
 * reply), and a `_cache` marker is attached when `cached` is true so
 * clients can tell a cache hit from a live completion.
 */
function buildOpenAIResponse(content: string, model: string, cached: boolean): any {
  const nowMs = Date.now();
  const choice = {
    index: 0,
    message: { role: 'assistant', content },
    finish_reason: 'stop',
  };
  return {
    id: `chatcmpl-cache-${nowMs}`,
    object: 'chat.completion',
    created: Math.floor(nowMs / 1000),
    model,
    choices: [choice],
    usage: { prompt_tokens: 0, completion_tokens: 0, total_tokens: 0 },
    _cache: cached ? { hit: true } : undefined,
  };
}

// Start server — bound to loopback only; this proxy is not meant to be
// reachable from outside the host.
app.listen(PORT, '127.0.0.1', () => {
  console.log(`Cache proxy running on http://127.0.0.1:${PORT}`);
});

// Graceful shutdown — close the Redis connection before exiting so
// in-flight commands are flushed rather than dropped.
process.on('SIGTERM', async () => {
  console.log('Cache proxy shutting down...');
  await closeRedis();
  process.exit(0);
});

// Ctrl-C: same cleanup, without the shutdown log line.
process.on('SIGINT', async () => {
  await closeRedis();
  process.exit(0);
});
