/**
 * Autonomous Facebook Page Monitoring Agent
 * 
 * Continuously monitors Facebook pages for new content,
 * automatically scrapes new posts/comments, and triggers analysis.
 * 
 * Usage:
 *   agent-monitor -- --investigation <id> --page <url> --interval <minutes>
 */

import {
  createAgentSession,
  SessionManager,
  SettingsManager,
  AuthStorage,
  ModelRegistry,
  createReadTool,
  createBashTool,
} from "@mariozechner/pi-coding-agent";
import { Type } from "@sinclair/typebox";
import { randomUUID } from "node:crypto";
import { setTimeout } from "node:timers/promises";
import { query } from "../db.js";
import type { FbPostData, FbCommentData } from "../types.js";

// ─── CLI Arguments ──────────────────────────────────────────────────────────

/** Parsed CLI configuration for a single monitoring run. */
interface MonitorArgs {
  /** Investigation ID new content is filed under (defaults to "default"). */
  investigation: string;
  /** URL of the Facebook page to watch (required; empty string until parsed). */
  page: string;
  /** Minutes between monitoring cycles (default 30). */
  interval: number;
  /** Scrape depth passed through to fb-scrape's --depth flag (default 25). */
  depth: number;
  /** When true, newly scraped content is queued for analysis (default true). */
  autoIngest: boolean;
  /** New-post count at/above which an ACTIVITY_SPIKE alert fires (default 10). */
  alertThreshold: number;
}

/**
 * Parse CLI flags into a MonitorArgs config.
 *
 * Exits the process with a clear message when --page is missing, when a flag
 * has no value, or when a numeric flag is not a positive integer. Previously
 * `parseInt` could yield NaN (e.g. `--interval abc`), which propagated into
 * the scheduler's `setTimeout(NaN)` and made the loop spin without waiting.
 */
function parseArgs(): MonitorArgs {
  const args = process.argv.slice(2);
  const config: MonitorArgs = {
    investigation: "default",
    page: "",
    interval: 30,
    depth: 25,
    autoIngest: true,
    alertThreshold: 10,
  };

  // Read the value following a flag, failing fast when it is missing
  // (e.g. a trailing `--interval` with nothing after it).
  const requireValue = (flag: string, value: string | undefined): string => {
    if (value === undefined) {
      console.error(`Error: ${flag} requires a value`);
      process.exit(1);
    }
    return value;
  };

  // Parse a positive-integer flag value, failing fast on garbage input.
  const requirePositiveInt = (flag: string, value: string | undefined): number => {
    const parsed = parseInt(requireValue(flag, value), 10);
    if (Number.isNaN(parsed) || parsed <= 0) {
      console.error(`Error: ${flag} must be a positive integer`);
      process.exit(1);
    }
    return parsed;
  };

  for (let i = 0; i < args.length; i++) {
    const flag = args[i];
    switch (flag) {
      case "--investigation":
      case "-i":
        config.investigation = requireValue(flag, args[++i]);
        break;
      case "--page":
      case "-p":
        config.page = requireValue(flag, args[++i]);
        break;
      case "--interval":
        config.interval = requirePositiveInt(flag, args[++i]);
        break;
      case "--depth":
        config.depth = requirePositiveInt(flag, args[++i]);
        break;
      case "--auto-ingest":
        config.autoIngest = requireValue(flag, args[++i]) === "true";
        break;
      case "--alert-threshold":
        config.alertThreshold = requirePositiveInt(flag, args[++i]);
        break;
    }
  }

  if (!config.page) {
    console.error("Error: --page <url> is required");
    process.exit(1);
  }

  return config;
}

// ─── Monitoring Agent ───────────────────────────────────────────────────────

/** Mutable state carried across monitoring cycles for one page. */
interface MonitoringContext {
  /** Investigation the collected content belongs to. */
  investigationId: string;
  /** Facebook page URL being watched. */
  pageUrl: string;
  /** Timestamp of the last successful scrape; new content is measured against this. */
  lastCheckTime: Date;
  /** Running total of posts collected since the monitor started. */
  postsCollected: number;
  /** Number of alerts emitted so far in this session. */
  alertsTriggered: number;
  /** Failure streak; reset to 0 on any successful cycle, drives backoff. */
  consecutiveFailures: number;
}

/**
 * Build an in-memory agent session with read/bash tools for this monitor.
 *
 * Streams assistant text deltas to stdout and logs each tool execution's
 * outcome as it completes. Compaction is disabled and retries are capped
 * at 3, matching the monitor's long-running, low-chatter usage.
 */
async function createMonitoringAgent(ctx: MonitoringContext) {
  const authStorage = AuthStorage.create();
  const modelRegistry = ModelRegistry.create(authStorage);
  const workingDir = process.cwd();

  const { session } = await createAgentSession({
    cwd: workingDir,
    sessionManager: SessionManager.inMemory(),
    settingsManager: SettingsManager.inMemory({
      compaction: { enabled: false },
      retry: { enabled: true, maxRetries: 3 },
    }),
    authStorage,
    modelRegistry,
    tools: [createReadTool(workingDir), createBashTool(workingDir)],
  });

  // Mirror the agent's activity to the console as it happens.
  session.subscribe((event) => {
    switch (event.type) {
      case "message_update":
        if (event.assistantMessageEvent.type === "text_delta") {
          process.stdout.write(event.assistantMessageEvent.delta);
        }
        break;
      case "tool_execution_end":
        console.log(`\n[Tool] ${event.toolName}: ${event.isError ? "❌ error" : "✅ success"}`);
        break;
    }
  });

  return session;
}

/**
 * Check whether the monitored page has posts newer than the last check.
 *
 * Fix: `newPostCount` previously reported the page's lifetime post total
 * (`COUNT(*)`), so once the total ever exceeded the alert threshold an
 * ACTIVITY_SPIKE alert fired on every single cycle. It now counts only
 * posts published after `ctx.lastCheckTime`.
 *
 * @returns hasNewContent — true when posts newer than the last check exist,
 *          or when the page has never been scraped (bootstraps the first
 *          collection); newPostCount — posts published since the last check;
 *          latestPostTime — most recent post timestamp, or null if none.
 * @throws Re-throws any database error after logging it.
 */
async function checkForNewContent(ctx: MonitoringContext): Promise<{
  hasNewContent: boolean;
  newPostCount: number;
  latestPostTime: Date | null;
}> {
  try {
    // Derive the stable entity ID used when the page's content was ingested.
    const pageId = extractFacebookPageId(ctx.pageUrl);
    const entityId = `fb_page_${pageId}`;

    // COUNT(CASE ...) is standard SQL; counts only rows newer than the cutoff.
    const result = await query(
      `SELECT MAX(published_at) as latest_post,
              COUNT(CASE WHEN published_at > $2 THEN 1 END) as new_count
       FROM content
       WHERE entity_id = $1 AND type = 'post'`,
      [entityId, ctx.lastCheckTime]
    );

    const row = result.rows[0];
    const latestPostTime = row.latest_post ? new Date(row.latest_post as string | number) : null;
    // No posts at all means the page was never scraped — treat that as "new
    // content" so the first cycle performs the initial collection.
    const hasNewContent = !latestPostTime || latestPostTime > ctx.lastCheckTime;

    return {
      hasNewContent,
      newPostCount: Number(row.new_count ?? 0),
      latestPostTime,
    };
  } catch (error) {
    console.error("[Monitor] Error checking content:", error);
    throw error;
  }
}

/**
 * Run the external `fb-scrape` CLI against the monitored page.
 *
 * Inherits stdio so scraper output streams straight to the console.
 * Resolves on exit code 0; rejects on a non-zero exit code or when the
 * process fails to spawn at all (e.g. fb-scrape not on PATH).
 */
async function scrapeNewContent(ctx: MonitoringContext, depth: number) {
  console.log(`\n[Monitor] Scraping new content from ${ctx.pageUrl}...`);

  const cliArgs = [
    ctx.pageUrl,
    "--depth", String(depth),
    "--investigation", ctx.investigationId,
    "--save",
    "--comments",
  ];
  console.log(`[Monitor] Executing: fb-scrape ${cliArgs.join(" ")}`);

  const { spawn } = await import("node:child_process");

  await new Promise<void>((resolve, reject) => {
    const child = spawn("fb-scrape", cliArgs, { stdio: "inherit" });
    child.on("error", reject);
    child.on("close", (exitCode) => {
      if (exitCode === 0) {
        resolve();
      } else {
        reject(new Error(`Scrape failed with code ${exitCode}`));
      }
    });
  });
}

/**
 * Hand off newly collected content to the analysis stage.
 *
 * Placeholder: the real implementation will invoke the content-analyzer
 * agent; for now the IDs are only logged for later batch processing.
 */
async function triggerAnalysis(ctx: MonitoringContext, contentIds: string[]) {
  const header = `\n[Monitor] Triggering analysis for ${contentIds.length} content items...`;
  console.log(header);

  const idList = contentIds.join(", ");
  console.log(`[Monitor] Content IDs for analysis: ${idList}`);
}

/**
 * Record and print an alert for the current monitoring session.
 *
 * Increments ctx.alertsTriggered and writes a human-readable summary to
 * stdout. Delivery to external channels is not yet wired up.
 */
async function sendAlert(ctx: MonitoringContext, type: string, details: Record<string, unknown>) {
  ctx.alertsTriggered += 1;

  const { investigationId, pageUrl } = ctx;
  console.log(`\n⚠️  ALERT [${type}]`);
  console.log(`Investigation: ${investigationId}`);
  console.log(`Page: ${pageUrl}`);
  console.log(`Details:`, JSON.stringify(details, null, 2));

  // TODO: Integrate with notification system (email, Slack, etc.)
  // Could use gmcli skill to send email alerts
}

/**
 * Main monitoring loop: check → scrape → alert → analyze, forever.
 *
 * Each cycle checks the DB for new posts, scrapes when activity is found,
 * raises an ACTIVITY_SPIKE alert when the post count meets the threshold,
 * and (optionally) triggers analysis. Failures use exponential backoff and
 * raise a MONITORING_FAILURE alert after three consecutive errors.
 * Never returns; runs until the process is killed or crashes.
 */
async function monitoringLoop(config: MonitorArgs) {
  const ctx: MonitoringContext = {
    investigationId: config.investigation,
    pageUrl: config.page,
    // Seed the cutoff one interval in the past so the first cycle can
    // pick up anything published just before the monitor started.
    lastCheckTime: new Date(Date.now() - config.interval * 60 * 1000),
    postsCollected: 0,
    alertsTriggered: 0,
    consecutiveFailures: 0,
  };

  console.log("🔍 CultGuard Facebook Monitor");
  console.log(`Investigation: ${ctx.investigationId}`);
  console.log(`Page: ${ctx.pageUrl}`);
  console.log(`Interval: ${config.interval} minutes`);
  console.log(`Auto-ingest: ${config.autoIngest}`);
  console.log(`Alert threshold: ${config.alertThreshold} posts\n`);

  let cycleCount = 0;

  while (true) {
    cycleCount++;
    console.log(`\n${"=".repeat(60)}`);
    console.log(`[Cycle ${cycleCount}] ${new Date().toISOString()}`);
    console.log(`${"=".repeat(60)}`);

    try {
      // Check for new content
      const checkResult = await checkForNewContent(ctx);
      
      if (checkResult.hasNewContent) {
        console.log(`\n✅ New content detected!`);
        
        // Scrape new content. Only advance lastCheckTime after a successful
        // scrape so failed cycles re-examine the same window next time.
        await scrapeNewContent(ctx, config.depth);
        ctx.lastCheckTime = new Date();
        ctx.postsCollected += checkResult.newPostCount;

        // Check if we should trigger alerts
        if (checkResult.newPostCount >= config.alertThreshold) {
          await sendAlert(ctx, "ACTIVITY_SPIKE", {
            postCount: checkResult.newPostCount,
            threshold: config.alertThreshold,
          });
        }

        // Trigger analysis if auto-ingest enabled
        if (config.autoIngest) {
          // Would fetch content IDs here
          await triggerAnalysis(ctx, []);
        }
      } else {
        console.log(`\n✅ No new content since ${ctx.lastCheckTime.toISOString()}`);
      }

      // Any successful cycle ends the failure streak.
      ctx.consecutiveFailures = 0;
    } catch (error) {
      ctx.consecutiveFailures++;
      console.error(`\n❌ Monitoring cycle failed:`, error);

      // Alert on consecutive failures
      if (ctx.consecutiveFailures >= 3) {
        await sendAlert(ctx, "MONITORING_FAILURE", {
          consecutiveFailures: ctx.consecutiveFailures,
          error: error instanceof Error ? error.message : "Unknown error",
        });
      }

      // Exponential backoff: interval × 2^(failures-1), capped at 4 hours.
      const backoffMs = Math.min(
        config.interval * 60 * 1000 * Math.pow(2, ctx.consecutiveFailures - 1),
        4 * 60 * 60 * 1000 // Max 4 hours
      );
      console.log(`[Monitor] Backing off for ${backoffMs / 1000}s`);
      await setTimeout(backoffMs);
      // Skip the normal inter-cycle wait; the backoff already waited.
      continue;
    }

    // Wait until next check
    const nextCheckMs = config.interval * 60 * 1000;
    console.log(`\n[Monitor] Next check in ${config.interval} minutes`);
    await setTimeout(nextCheckMs);
  }
}

// ─── Helper Functions ───────────────────────────────────────────────────────

/**
 * Best-effort extraction of a stable Facebook page identifier from a URL.
 *
 * Tries numeric-ID URL shapes first — including `profile.php?id=...` links,
 * which previously fell through every pattern and made the path fallback
 * return the useless segment "profile.php" — then falls back to the last
 * non-empty path segment for vanity URLs, and finally to a random UUID
 * when the URL cannot be parsed at all.
 */
function extractFacebookPageId(url: string): string {
  const patterns = [
    /facebook\.com\/profile\.php\?id=(\d+)/,
    /facebook\.com\/(\d+)/,
    /facebook\.com\/people\/[^/]+\/(\d+)/,
    /facebook\.com\/p\/[^/]+\/(\d+)/,
  ];

  for (const pattern of patterns) {
    const match = url.match(pattern);
    if (match) {
      return match[1];
    }
  }

  // Vanity URL: use the last non-empty path segment as the identifier.
  try {
    const urlObj = new URL(url);
    return urlObj.pathname.split("/").filter(Boolean).pop() || randomUUID();
  } catch {
    // Not a URL at all — fall back to a unique placeholder ID.
    return randomUUID();
  }
}

// ─── Main Entry Point ───────────────────────────────────────────────────────

/**
 * Entry point: parse CLI flags and run the monitor until killed.
 * Any uncaught error tears the process down with a non-zero exit code.
 */
async function main() {
  try {
    const config = parseArgs();
    await monitoringLoop(config);
  } catch (error) {
    console.error("\n💀 Monitor crashed:", error);
    process.exit(1);
  }
}

// Fire-and-forget: monitoringLoop never resolves in normal operation.
main();
