import React, { useCallback, useMemo, useState } from "react";
import { createRoot } from "react-dom/client";
import {
  BarChart,
  Bar,
  LineChart,
  Line,
  XAxis,
  YAxis,
  CartesianGrid,
  Tooltip,
  Legend,
  ResponsiveContainer,
  ReferenceLine,
  ReferenceDot,
} from "recharts";
import {
  AlertCircle,
  CheckCircle,
  TrendingUp,
  Clock,
  Zap,
  Activity,
  Plus,
  Trash2,
  Copy,
  Play,
} from "lucide-react";

// ============================================================================
// Min-heap keyed on `t` — O(log n) push/pop for the in-flight completion queue
// ============================================================================
class MinHeap {
  constructor() {
    // Backing array in implicit-binary-tree layout: children of index i
    // live at 2i+1 and 2i+2; the minimum-`t` item is always at index 0.
    this.a = [];
  }

  /** Number of items currently stored. */
  get size() {
    return this.a.length;
  }

  /** Smallest-`t` item without removing it; undefined when empty. */
  peek() {
    return this.a[0];
  }

  /** Insert `x`, sifting it up until its parent's `t` is no larger. O(log n). */
  push(x) {
    const heap = this.a;
    heap.push(x);
    let child = heap.length - 1;
    while (child > 0) {
      const parent = (child - 1) >> 1;
      if (heap[parent].t <= heap[child].t) break;
      [heap[parent], heap[child]] = [heap[child], heap[parent]];
      child = parent;
    }
  }

  /** Remove and return the smallest-`t` item; undefined when empty. O(log n). */
  pop() {
    const heap = this.a;
    if (heap.length === 0) return undefined;
    const top = heap[0];
    const tail = heap.pop();
    if (heap.length > 0) {
      // Move the last leaf to the root, then sift it down.
      heap[0] = tail;
      const n = heap.length;
      let node = 0;
      for (;;) {
        const left = 2 * node + 1;
        const right = left + 1;
        let smallest = node;
        if (left < n && heap[left].t < heap[smallest].t) smallest = left;
        if (right < n && heap[right].t < heap[smallest].t) smallest = right;
        if (smallest === node) break;
        [heap[smallest], heap[node]] = [heap[node], heap[smallest]];
        node = smallest;
      }
    }
    return top;
  }
}

// ============================================================================
// Lognormal latency sampling.
//
// Real-world per-request latency is heavy-tailed; the previous version used
// uniform ±X% jitter, which the central limit theorem washes out at scale
// (P95 ≈ P50 for N=15k). Lognormal parameterized by coefficient of variation
// (CV = σ/μ) gives the long-tail behavior that actually matters:
//   CV=0.3 → P95/P50 ≈ 1.6×    (typical well-behaved API)
//   CV=0.5 → P95/P50 ≈ 2.2×    (some tail risk)
//   CV=1.0 → P95/P50 ≈ 3.9×    (heavy tail — slow requests common)
// ============================================================================
/**
 * Draw one sample from a lognormal distribution with arithmetic mean `mean`
 * and coefficient of variation `cv` (σ/μ). A cv of zero or less degenerates
 * to the exact mean — no randomness.
 */
function lognormalSample(mean, cv) {
  if (cv <= 0) return mean;
  // Underlying-normal parameters chosen so the resulting lognormal has the
  // requested arithmetic mean and CV.
  const sigma = Math.sqrt(Math.log(1 + cv * cv));
  const mu = Math.log(mean) - (sigma * sigma) / 2;
  // Box–Muller transform; u1 is clamped away from 0 so log() stays finite.
  const u1 = Math.random() || 1e-10;
  const u2 = Math.random();
  const standardNormal =
    Math.sqrt(-2 * Math.log(u1)) * Math.cos(2 * Math.PI * u2);
  return Math.exp(mu + sigma * standardNormal);
}

// ============================================================================
// SIMULATOR — discrete-event, single client-side rate-limit gate.
//
// We model one strategy: a sliding-window cap of `rps` starts per second.
// All standard strategies (token bucket, leaky bucket, sliding window,
// adaptive) converge to throughput = min(rps, workers/duration) in steady
// state, so the comparison was a distinction without a difference. The
// simulator is here for two things the closed-form model can't give you:
//   (1) transient behavior — what the queue and RPS look like over time
//   (2) latency variance via lognormal jitter (Monte Carlo)
// ============================================================================
/**
 * Discrete-event simulation of draining the queue under a sliding-window
 * client-side rate limit (`apiRateLimit` starts per rolling second) and a
 * concurrency cap (`workerCount` in-flight requests). Per-request latency is
 * lognormal with mean `avgRequestDuration` and CV `latencyCV`.
 *
 * Returns completion time (minutes and seconds), processed/remaining counts,
 * achieved RPS, worker utilization, a viability flag against the SLA, and
 * time-series data (`queueData`, `rpsData`) for the charts.
 *
 * Fix vs. previous version: `rpsData` used to be derived from a 500-entry
 * event sample, which silently truncated the "Achieved RPS Over Time" chart
 * after the first ~500 request starts (a few seconds of a 15k-request run).
 * Starts are now counted directly into per-second buckets, whose size is
 * bounded by sim duration rather than queue size.
 */
function simulate(scenario) {
  const {
    workerCount,
    avgRequestDuration: avgDur,
    apiRateLimit: rps,
    latencyCV,
    queueSize,
    maxCompletionTime,
  } = scenario;

  let now = 0; // simulation clock, seconds
  let pending = queueSize; // requests not yet started
  let processed = 0; // requests completed
  // Worker utilization = ∫ inFlight(t) dt / (W × totalTime). Accumulated by
  // multiplying current in-flight count by the time delta on each transition.
  let busyArea = 0;
  let lastBusyAt = 0;

  const inFlight = new MinHeap(); // completion timestamps of in-flight requests
  // Sliding-window record of recent start timestamps. Head pointer instead of
  // array.shift() — keeps prune amortized O(1).
  const recentStarts = [];
  let head = 0;

  const queueData = []; // sampled (time, queueDepth, inFlight) points
  // Per-second start counts for the WHOLE run (keyed by floor(now)).
  const rpsBuckets = new Map();
  let nextSampleAt = 0;

  const recentCount = () => recentStarts.length - head;
  const pruneRecent = () => {
    while (head < recentStarts.length && recentStarts[head] < now - 1) head++;
  };
  const accumBusy = () => {
    busyArea += inFlight.size * (now - lastBusyAt);
    lastBusyAt = now;
  };
  // Earliest time a new request may start: now, if fewer than `rps` starts in
  // the trailing second; otherwise when the oldest recorded start ages out.
  const nextAllowedStart = () => {
    pruneRecent();
    return recentCount() < rps ? now : recentStarts[head] + 1;
  };
  const startOne = () => {
    accumBusy();
    pending--;
    const dur = Math.max(0.001, lognormalSample(avgDur, latencyCV));
    recentStarts.push(now);
    inFlight.push({ t: now + dur });
    const bucket = Math.floor(now);
    rpsBuckets.set(bucket, (rpsBuckets.get(bucket) || 0) + 1);
  };
  const completeOne = () => {
    accumBusy();
    inFlight.pop();
    processed++;
  };
  // Sample the queue/in-flight series at most every 0.5 simulated seconds.
  const sampleQueue = () => {
    if (now < nextSampleAt) return;
    queueData.push({ time: now, queueDepth: pending, inFlight: inFlight.size });
    nextSampleAt = now + 0.5;
  };

  // Hard stops so a degenerate scenario can't hang the UI thread.
  const maxSimSeconds = Math.max(60, maxCompletionTime * 60 * 5);
  const maxIters = queueSize * 4 + 1000;
  let iters = 0;
  sampleQueue();

  while (
    (pending > 0 || inFlight.size > 0) &&
    now < maxSimSeconds &&
    iters++ < maxIters
  ) {
    const nextDone = inFlight.size > 0 ? inFlight.peek().t : Infinity;
    const canStartNow = pending > 0 && inFlight.size < workerCount;
    const nextStart = canStartNow ? nextAllowedStart() : Infinity;

    // Start everything permitted at the current instant before advancing time.
    if (canStartNow && nextStart <= now + 1e-9 && nextStart <= nextDone) {
      while (
        pending > 0 &&
        inFlight.size < workerCount &&
        nextAllowedStart() <= now + 1e-9
      ) {
        startOne();
      }
      sampleQueue();
      continue;
    }

    // Advance the clock to the next event (a completion or an allowed start).
    const nextT = Math.min(nextStart, nextDone);
    if (!Number.isFinite(nextT) || nextT <= now) break;
    now = nextT;

    // Drain every completion landing at (or epsilon-before) the new time.
    while (inFlight.size > 0 && inFlight.peek().t <= now + 1e-9) completeOne();
    sampleQueue();
  }
  accumBusy();

  const rpsData = [...rpsBuckets.entries()]
    .sort((a, b) => a[0] - b[0])
    .map(([time, count]) => ({ time, rps: count }));

  const completionTime = now / 60; // minutes
  return {
    completionTime,
    completionTimeSeconds: now,
    processed,
    remaining: pending,
    achievedRPS: now > 0 ? processed / now : 0,
    workerUtilization: now > 0 ? busyArea / (workerCount * now) : 0,
    viable:
      processed >= queueSize * 0.99 && completionTime <= maxCompletionTime,
    queueData,
    rpsData,
  };
}

/**
 * Run the simulator `iterations` times and summarize the resulting
 * distribution of completion times (minutes). Returns null when
 * iterations <= 0 (variance modeling disabled).
 */
function runMonteCarlo(scenario, iterations) {
  if (iterations <= 0) return null;
  const times = Array.from({ length: iterations }, () =>
    simulate(scenario).completionTime,
  ).sort((a, b) => a - b);
  // Empirical percentile by index, clamped to the last sample.
  const pct = (p) =>
    times[Math.min(times.length - 1, Math.floor(times.length * p))];
  const total = times.reduce((sum, t) => sum + t, 0);
  return {
    distribution: times,
    mean: total / times.length,
    p50: pct(0.5),
    p95: pct(0.95),
    p99: pct(0.99),
    min: times[0],
    max: times[times.length - 1],
  };
}

// CV sensitivity sweep — runs Monte Carlo at a range of latency CVs.
//
// This replaces the prior single-CV distribution histogram, which was
// uninformative: when the rate limit binds, drain time ≈ N/R + (variance of
// the *last* request only), so the histogram stayed tight at every CV. The
// useful question isn't "what's the distribution at this CV?" but "does
// latency variance affect drain time at all in my regime?" — and that depends
// on the binding constraint:
//   - Rate-limit-bound: flat curve. The metronome dominates.
//   - Worker-bound:     upward slope. Slow requests hold worker slots,
//                       reducing effective throughput.
const CV_SWEEP_VALUES = [0, 0.1, 0.2, 0.3, 0.5, 0.75, 1.0, 1.5, 2.0];

/**
 * Monte Carlo mean/P95 drain time at each CV in CV_SWEEP_VALUES.
 * Returns [{ cv, mean, p95 }, ...] in sweep order, times in minutes.
 */
function runCVSensitivity(scenario, iterationsPerPoint) {
  return CV_SWEEP_VALUES.map((cv) => {
    const times = Array.from({ length: iterationsPerPoint }, () =>
      simulate({ ...scenario, latencyCV: cv }).completionTime,
    ).sort((a, b) => a - b);
    // Empirical percentile by index — same estimator as runMonteCarlo.
    const pct = (p) =>
      times[Math.min(times.length - 1, Math.floor(times.length * p))];
    return {
      cv,
      mean: times.reduce((sum, t) => sum + t, 0) / times.length,
      p95: pct(0.95),
    };
  });
}

// ============================================================================
// CLOSED-FORM ANALYSIS — no simulation needed.
//
// In steady state, throughput is limited by min(rps, workers/duration). This
// is the headline number; the simulator agrees with it within ~1% but is
// slower and noisier. The closed form is what drives the sensitivity sweeps.
// ============================================================================
/**
 * Closed-form steady-state analysis — no simulation.
 *
 * Effective throughput = min(rate limit, workers/duration); drain time,
 * utilization, the bottleneck label, and the recommendation text all
 * derive from it.
 */
function analyzeQueue(scenario) {
  const {
    workerCount: W,
    avgRequestDuration: D,
    apiRateLimit: R,
    queueSize: N,
  } = scenario;

  const workerMaxThroughput = W / D;
  const effective = Math.min(R, workerMaxThroughput);
  const minTimeMin = N / effective / 60;

  // Steady-state worker utilization: worker-bound ⇒ all busy (1.0);
  // API-bound ⇒ only about R·D of the W workers are needed at once.
  const utilization = Math.min(1, (effective * D) / W);

  // ±5% dead band around parity so near-ties are reported as balanced.
  const skew = workerMaxThroughput / R;
  const bottleneck =
    skew > 1.05 ? "API RATE LIMIT" : skew < 0.95 ? "WORKERS" : "BALANCED";

  // Concrete recommendation, expressed in the user's controls.
  const buildRecommendation = {
    WORKERS: () => {
      const target = Math.ceil(R * D);
      return `Workers are the bottleneck — adding ${target - W} more workers (to ${target}) would saturate the API limit and minimize completion time.`;
    },
    "API RATE LIMIT": () => {
      const idle = W - Math.ceil(R * D);
      return `The API limit is the bottleneck — about ${idle} of your ${W} workers are idle on average. Negotiating a higher rate limit is the only way to go faster.`;
    },
    BALANCED: () =>
      `Workers and API limit are matched — to scale throughput you need to increase both proportionally.`,
  };

  return {
    workerMaxThroughput,
    effective,
    minTimeMin,
    utilization,
    bottleneck,
    recommendation: buildRecommendation[bottleneck](),
  };
}

// ============================================================================
// SENSITIVITY SWEEPS — closed-form, near-instant.
//
// These answer the "what if I add 5 workers?" / "what if I get 2× the rate
// limit?" questions directly, instead of forcing the user to edit the config
// and re-run.
// ============================================================================
/**
 * Closed-form time-to-drain (minutes) as the worker count sweeps from 1
 * upward. Returns [{ workers, time }] for every integer worker count.
 */
function workerSensitivity(scenario) {
  const {
    avgRequestDuration: D,
    apiRateLimit: R,
    queueSize: N,
    workerCount: W,
  } = scenario;
  // Sweep to 2× current workers or 1.5× the rate-limit knee (R·D workers),
  // whichever is larger, so the flat region past the knee stays visible.
  const upper = Math.max(W * 2, Math.ceil(R * D * 1.5), 5);
  return Array.from({ length: upper }, (_, i) => {
    const workers = i + 1;
    const throughput = Math.min(R, workers / D);
    return { workers, time: N / throughput / 60 };
  });
}

/**
 * Closed-form time-to-drain (minutes) as the API rate limit sweeps across
 * 50 evenly spaced points. Returns [{ rps, time }] with rps rounded to one
 * decimal place for axis labeling.
 */
function rateLimitSensitivity(scenario) {
  const {
    workerCount: W,
    avgRequestDuration: D,
    queueSize: N,
    apiRateLimit: R,
  } = scenario;
  const POINTS = 50;
  // Sweep to 2× the current limit or 1.5× worker max throughput, whichever
  // is larger, so the worker-bound plateau is visible.
  const upper = Math.max(R * 2, Math.ceil((W / D) * 1.5));
  const step = upper / POINTS;
  return Array.from({ length: POINTS }, (_, i) => {
    const r = step * (i + 1);
    const throughput = Math.min(r, W / D);
    return { rps: Math.round(r * 10) / 10, time: N / throughput / 60 };
  });
}

// ============================================================================
// COMPONENT
// ============================================================================
// Seed scenario shown on first load. Units: queueSize in requests,
// apiRateLimit in starts/second, avgRequestDuration in seconds,
// latencyCV unitless (σ/μ), maxCompletionTime in minutes.
const DEFAULT_SCENARIO = {
  name: "Current State",
  queueSize: 15000, // total requests to drain
  apiRateLimit: 100, // upstream cap on request starts per second
  avgRequestDuration: 0.2, // mean per-request latency, seconds
  latencyCV: 0.3, // latency coefficient of variation (0.3 = mild tail)
  workerCount: 20, // max concurrent in-flight requests
  maxCompletionTime: 60, // SLA target, minutes — drives the viability check
  active: true, // whether this scenario is included in compare mode
};

// Top-level app component. Owns the scenario list, selection, compare-mode
// toggle, Monte Carlo iteration count, and the manual run trigger.
const QueueAnalyzer = () => {
  const [scenarios, setScenarios] = useState([{ id: 1, ...DEFAULT_SCENARIO }]);
  const [selectedScenario, setSelectedScenario] = useState(1);
  const [compareMode, setCompareMode] = useState(false);
  const [monteCarloIterations, setMonteCarloIterations] = useState(50);
  // `runVersion` is a manual compute trigger: results recalculate only when
  // the Run button increments it, never on scenario edits.
  const [runVersion, setRunVersion] = useState(0);

  const runSimulations = useCallback(() => setRunVersion((v) => v + 1), []);

  // Per-scenario results keyed by scenario id:
  // { sim, monteCarlo, cvSweep, analysis, workerSweep, rateSweep }.
  // Deliberately keyed on `runVersion` alone (hence the exhaustive-deps
  // suppression below): editing scenarios or the iteration count does not
  // recompute anything — the memo reads whatever state is current when the
  // Run button is clicked.
  const results = useMemo(() => {
    if (runVersion === 0) return {};
    const out = {};
    // Split the iteration budget: keep most of it on a single-CV Monte Carlo
    // (powers the headline P50/P95/P99) and reserve a smaller, equal share
    // for each point of the CV sweep. ~6 sims per CV × 9 CVs is plenty for
    // a curve where adjacent points differ by far more than sampling noise.
    const cvSweepBudget = Math.max(
      4,
      Math.floor(monteCarloIterations / CV_SWEEP_VALUES.length / 2),
    );
    for (const scenario of scenarios) {
      const sim = simulate(scenario);
      const monteCarlo = runMonteCarlo(scenario, monteCarloIterations);
      const cvSweep =
        monteCarloIterations > 0
          ? runCVSensitivity(scenario, cvSweepBudget)
          : null;
      const analysis = analyzeQueue(scenario);
      const workerSweep = workerSensitivity(scenario);
      const rateSweep = rateLimitSensitivity(scenario);
      out[scenario.id] = {
        sim,
        monteCarlo,
        cvSweep,
        analysis,
        workerSweep,
        rateSweep,
      };
    }
    return out;
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [runVersion]);

  // Add a scenario cloned from the currently selected one (fallback: first)
  // under a fresh id one past the current maximum.
  const addScenario = () => {
    const newId = Math.max(...scenarios.map((s) => s.id)) + 1;
    const base =
      scenarios.find((s) => s.id === selectedScenario) || scenarios[0];
    setScenarios([
      ...scenarios,
      { ...base, id: newId, name: `Scenario ${newId}`, active: true },
    ]);
  };
  // Refuse to delete the last remaining scenario; move the selection if the
  // selected scenario is the one removed.
  const deleteScenario = (id) => {
    if (scenarios.length <= 1) return;
    const remaining = scenarios.filter((s) => s.id !== id);
    setScenarios(remaining);
    if (selectedScenario === id) setSelectedScenario(remaining[0].id);
  };
  const duplicateScenario = (id) => {
    const newId = Math.max(...scenarios.map((s) => s.id)) + 1;
    const src = scenarios.find((s) => s.id === id);
    setScenarios([
      ...scenarios,
      { ...src, id: newId, name: `${src.name} (Copy)` },
    ]);
  };
  // Immutable single-field update on one scenario.
  const updateScenario = (id, field, value) =>
    setScenarios(
      scenarios.map((s) => (s.id === id ? { ...s, [field]: value } : s)),
    );
  // Flip whether a scenario participates in compare mode.
  const toggleScenarioActive = (id) =>
    setScenarios(
      scenarios.map((s) => (s.id === id ? { ...s, active: !s.active } : s)),
    );

  const currentScenario = scenarios.find((s) => s.id === selectedScenario);
  // Undefined until the first run (results is {} while runVersion === 0).
  const currentResults = results[selectedScenario];
  const hasRun = runVersion > 0;

  return (
    <div className="w-full max-w-[1800px] mx-auto p-6 bg-gray-50">
      <div className="bg-white rounded-lg shadow-lg p-6 mb-6 flex items-center justify-between gap-4 flex-wrap">
        <div>
          <h1 className="text-3xl font-bold mb-2">
            Rate-Limited Queue Analyzer
          </h1>
          <p className="text-gray-600">
            Estimate time-to-drain for a queue of API calls bounded by a worker
            pool and an upstream rate limit.
          </p>
        </div>
        <button
          onClick={runSimulations}
          className="px-6 py-3 bg-blue-600 text-white rounded-lg flex items-center gap-2 hover:bg-blue-700"
        >
          <Play size={18} />
          {hasRun ? "Re-run" : "Run Simulation"}
        </button>
      </div>

      <ScenarioCards
        scenarios={scenarios}
        selectedScenario={selectedScenario}
        compareMode={compareMode}
        results={results}
        onSelect={setSelectedScenario}
        onAdd={addScenario}
        onDelete={deleteScenario}
        onDuplicate={duplicateScenario}
        onUpdate={updateScenario}
        onToggleActive={toggleScenarioActive}
        onToggleCompareMode={() => setCompareMode((m) => !m)}
      />

      {!compareMode && currentScenario && (
        <ConfigurationPanel
          scenario={currentScenario}
          monteCarloIterations={monteCarloIterations}
          onUpdate={updateScenario}
          onUpdateMonteCarlo={setMonteCarloIterations}
        />
      )}

      {!hasRun && (
        <div className="bg-white rounded-lg shadow-lg p-12 mb-6 text-center text-gray-500">
          <Play size={48} className="mx-auto mb-3 opacity-30" />
          <p className="text-lg">Click Run Simulation to compute results.</p>
          <p className="text-sm mt-2">
            Edits to scenarios won't trigger a re-run automatically.
          </p>
        </div>
      )}

      {hasRun && !compareMode && currentScenario && currentResults && (
        <SingleScenarioResults
          scenario={currentScenario}
          results={currentResults}
          monteCarloIterations={monteCarloIterations}
        />
      )}

      {hasRun && compareMode && (
        <CompareModeResults
          scenarios={scenarios.filter((s) => s.active)}
          results={results}
        />
      )}
    </div>
  );
};

// ----------------------------------------------------------------------------
// Subcomponents
// ----------------------------------------------------------------------------

const Field = ({ label, help, children }) => (
  <div>
    <label className="block text-sm font-medium text-gray-700 mb-2">
      {label}
    </label>
    {children}
    {help && <p className="text-xs text-gray-500 mt-1">{help}</p>}
  </div>
);

const Card = ({ icon, color, title, value, sub }) => (
  <div className="bg-white rounded-lg shadow-lg p-6">
    <div className="flex items-center mb-2">
      <span className={`mr-2 ${color}`}>{icon}</span>
      <h3 className="text-lg font-semibold">{title}</h3>
    </div>
    <p className="text-2xl font-bold mb-1">{value}</p>
    {sub && <div className="text-sm text-gray-600">{sub}</div>}
  </div>
);

const Section = ({ title, subtitle, children }) => (
  <div className="bg-white rounded-lg shadow-lg p-6 mb-6">
    <h2 className="text-xl font-semibold mb-1">{title}</h2>
    {subtitle && <p className="text-sm text-gray-500 mb-4">{subtitle}</p>}
    {!subtitle && <div className="mb-4" />}
    {children}
  </div>
);

const Stat = ({ label, value }) => (
  <div className="text-center">
    <div className="font-semibold">{label}</div>
    <div>{value}</div>
  </div>
);

// Grid of scenario cards: click to select, rename inline, duplicate, delete
// (guarded so at least one remains, per the parent's onDelete), and toggle
// inclusion in compare mode. Each card shows a headline result (drain time /
// P95 / bottleneck) once results exist for that scenario id.
const ScenarioCards = ({
  scenarios,
  selectedScenario,
  compareMode,
  results,
  onSelect,
  onAdd,
  onDelete,
  onDuplicate,
  onUpdate,
  onToggleActive,
  onToggleCompareMode,
}) => (
  <div className="bg-white rounded-lg shadow-lg p-6 mb-6">
    <div className="flex items-center justify-between mb-4">
      <h2 className="text-xl font-semibold">Scenarios</h2>
      <div className="flex gap-2">
        <button
          onClick={onToggleCompareMode}
          className={`px-4 py-2 rounded ${compareMode ? "bg-blue-600 text-white" : "bg-gray-200"}`}
        >
          Compare Mode
        </button>
        <button
          onClick={onAdd}
          className="px-4 py-2 bg-green-600 text-white rounded flex items-center gap-2"
        >
          <Plus size={16} /> Add Scenario
        </button>
      </div>
    </div>

    <div className="grid grid-cols-1 md:grid-cols-2 lg:grid-cols-3 gap-4">
      {scenarios.map((scenario) => {
        // Undefined until the first run populates results for this id.
        const r = results[scenario.id];
        return (
          <div
            key={scenario.id}
            onClick={() => onSelect(scenario.id)}
            className={`border-2 rounded-lg p-4 cursor-pointer ${
              selectedScenario === scenario.id
                ? "border-blue-500 bg-blue-50"
                : "border-gray-300"
            } ${scenario.active || !compareMode ? "" : "opacity-50"}`}
          >
            <div className="flex items-center justify-between mb-2 gap-2">
              <input
                type="text"
                value={scenario.name}
                onChange={(e) => onUpdate(scenario.id, "name", e.target.value)}
                onClick={(e) => e.stopPropagation()}
                className="font-semibold text-lg bg-transparent border-none focus:outline-none w-full min-w-0"
              />
              <div className="flex gap-2 flex-shrink-0 items-center">
                {compareMode && (
                  <input
                    type="checkbox"
                    checked={scenario.active}
                    onChange={() => onToggleActive(scenario.id)}
                    onClick={(e) => e.stopPropagation()}
                    className="w-5 h-5"
                  />
                )}
                <button
                  onClick={(e) => {
                    e.stopPropagation();
                    onDuplicate(scenario.id);
                  }}
                  className="text-blue-600 hover:text-blue-800"
                >
                  <Copy size={16} />
                </button>
                {scenarios.length > 1 && (
                  <button
                    onClick={(e) => {
                      e.stopPropagation();
                      onDelete(scenario.id);
                    }}
                    className="text-red-600 hover:text-red-800"
                  >
                    <Trash2 size={16} />
                  </button>
                )}
              </div>
            </div>
            <div className="text-sm space-y-1 text-gray-600">
              <div>{scenario.queueSize.toLocaleString()} requests</div>
              <div>
                {scenario.workerCount} workers · {scenario.apiRateLimit} RPS
                limit · {scenario.avgRequestDuration}s/req
              </div>
              {r && (
                <div className="mt-2 font-semibold text-gray-800">
                  {r.sim.completionTime.toFixed(1)} min
                  {r.monteCarlo && (
                    <span className="text-xs ml-1">
                      (P95: {r.monteCarlo.p95.toFixed(1)}m)
                    </span>
                  )}
                  <span className="text-xs ml-2 text-gray-500">
                    · {r.analysis.bottleneck}
                  </span>
                </div>
              )}
            </div>
          </div>
        );
      })}
    </div>
  </div>
);

// Edit form for one scenario's simulation parameters plus the app-wide
// Monte Carlo iteration count. Every numeric input writes through onUpdate
// (or onUpdateMonteCarlo) as a Number; nothing here triggers a re-run —
// results only recompute when the Run button is pressed.
const ConfigurationPanel = ({
  scenario,
  monteCarloIterations,
  onUpdate,
  onUpdateMonteCarlo,
}) => (
  <div className="bg-white rounded-lg shadow-lg p-6 mb-6">
    <h2 className="text-xl font-semibold mb-4">
      Configuration: {scenario.name}
    </h2>
    <div className="grid grid-cols-1 md:grid-cols-3 lg:grid-cols-4 gap-4">
      <Field label="Queue Size" help="Total requests to process">
        <input
          type="number"
          min="1"
          value={scenario.queueSize}
          onChange={(e) =>
            onUpdate(scenario.id, "queueSize", Number(e.target.value))
          }
          className="w-full px-3 py-2 border border-gray-300 rounded-md"
        />
      </Field>
      <Field
        label="API Rate Limit (RPS)"
        help="Upstream cap on starts per second"
      >
        <input
          type="number"
          min="1"
          value={scenario.apiRateLimit}
          onChange={(e) =>
            onUpdate(scenario.id, "apiRateLimit", Number(e.target.value))
          }
          className="w-full px-3 py-2 border border-gray-300 rounded-md"
        />
      </Field>
      <Field label="Avg Request Duration (s)" help="Mean per-request latency">
        <input
          type="number"
          step="0.05"
          min="0.01"
          value={scenario.avgRequestDuration}
          onChange={(e) =>
            onUpdate(scenario.id, "avgRequestDuration", Number(e.target.value))
          }
          className="w-full px-3 py-2 border border-gray-300 rounded-md"
        />
      </Field>
      <Field
        label="Latency Variability (CV)"
        help="0=constant · 0.3=mild · 0.5=noticeable · 1.0=heavy tail"
      >
        <input
          type="number"
          step="0.05"
          min="0"
          max="3"
          value={scenario.latencyCV}
          onChange={(e) =>
            onUpdate(scenario.id, "latencyCV", Number(e.target.value))
          }
          className="w-full px-3 py-2 border border-gray-300 rounded-md"
        />
      </Field>
      <Field label="Worker Count" help="Concurrent in-flight requests">
        <input
          type="number"
          min="1"
          value={scenario.workerCount}
          onChange={(e) =>
            onUpdate(scenario.id, "workerCount", Number(e.target.value))
          }
          className="w-full px-3 py-2 border border-gray-300 rounded-md"
        />
      </Field>
      <Field
        label="SLA Completion Time (min)"
        help="Target time-to-drain for the viability check"
      >
        <input
          type="number"
          min="1"
          value={scenario.maxCompletionTime}
          onChange={(e) =>
            onUpdate(scenario.id, "maxCompletionTime", Number(e.target.value))
          }
          className="w-full px-3 py-2 border border-gray-300 rounded-md"
        />
      </Field>
      <Field label="Monte Carlo Iterations" help="0 disables variance modeling">
        <input
          type="number"
          min="0"
          max="500"
          step="10"
          value={monteCarloIterations}
          onChange={(e) => onUpdateMonteCarlo(Number(e.target.value))}
          className="w-full px-3 py-2 border border-gray-300 rounded-md"
        />
      </Field>
    </div>
  </div>
);

/** Format a 0–1 fraction as a whole-number percent string (0.25 → "25%"). */
function fmtPct(x) {
  return `${(x * 100).toFixed(0)}%`;
}

const SingleScenarioResults = ({ scenario, results, monteCarloIterations }) => {
  const {
    sim,
    monteCarlo: mc,
    cvSweep,
    analysis,
    workerSweep,
    rateSweep,
  } = results;
  const bottleneckColor =
    analysis.bottleneck === "WORKERS"
      ? "text-orange-500"
      : analysis.bottleneck === "API RATE LIMIT"
        ? "text-purple-500"
        : "text-blue-500";

  return (
    <>
      <div className="grid grid-cols-1 md:grid-cols-4 gap-6 mb-6">
        <Card
          icon={<Clock size={24} />}
          color="text-blue-500"
          title="Time to Drain"
          value={`${sim.completionTime.toFixed(1)} min`}
          sub={
            mc && (
              <>
                <div>P50: {mc.p50.toFixed(1)}m</div>
                <div>P95: {mc.p95.toFixed(1)}m</div>
                <div>P99: {mc.p99.toFixed(1)}m</div>
              </>
            )
          }
        />
        <Card
          icon={<TrendingUp size={24} />}
          color={bottleneckColor}
          title="Bottleneck"
          value={analysis.bottleneck}
          sub={`Theoretical max throughput: ${analysis.effective.toFixed(1)} RPS`}
        />
        <Card
          icon={<Activity size={24} />}
          color="text-emerald-500"
          title="Worker Utilization"
          value={fmtPct(analysis.utilization)}
          sub={
            analysis.utilization < 0.95
              ? `${Math.round((1 - analysis.utilization) * scenario.workerCount)} workers idle on average`
              : "All workers busy"
          }
        />
        <Card
          icon={
            sim.viable ? <CheckCircle size={24} /> : <AlertCircle size={24} />
          }
          color={sim.viable ? "text-green-500" : "text-red-500"}
          title="Within SLA"
          value={sim.viable ? "YES" : "NO"}
          sub={`${sim.processed.toLocaleString()} / ${scenario.queueSize.toLocaleString()} processed in ≤ ${scenario.maxCompletionTime}m`}
        />
      </div>

      <div className="bg-white border-l-4 border-blue-500 p-4 mb-6 rounded shadow-sm">
        <p className="text-gray-800">{analysis.recommendation}</p>
      </div>

      <Section
        title="Sensitivity: Workers"
        subtitle="How much does the worker count change time-to-drain? The curve flattens at the 'knee' where additional workers can no longer be fed by the API rate limit."
      >
        <ResponsiveContainer width="100%" height={300}>
          <LineChart data={workerSweep}>
            <CartesianGrid strokeDasharray="3 3" />
            <XAxis
              dataKey="workers"
              label={{ value: "Workers", position: "insideBottom", offset: -5 }}
            />
            <YAxis
              label={{
                value: "Time to Drain (min)",
                angle: -90,
                position: "insideLeft",
              }}
            />
            <Tooltip formatter={(v) => `${Number(v).toFixed(2)} min`} />
            <Line
              type="monotone"
              dataKey="time"
              stroke="#3b82f6"
              strokeWidth={2}
              dot={false}
            />
            <ReferenceLine
              x={scenario.workerCount}
              stroke="#ef4444"
              strokeDasharray="3 3"
              label={{ value: "Current", position: "top" }}
            />
            <ReferenceLine
              y={scenario.maxCompletionTime}
              stroke="#f59e0b"
              strokeDasharray="3 3"
              label={{ value: "SLA", position: "right" }}
            />
          </LineChart>
        </ResponsiveContainer>
      </Section>

      <Section
        title="Sensitivity: API Rate Limit"
        subtitle="If you can negotiate a higher rate limit, how much does it help? The line is flat past the point where workers become the bottleneck."
      >
        <ResponsiveContainer width="100%" height={300}>
          <LineChart data={rateSweep}>
            <CartesianGrid strokeDasharray="3 3" />
            <XAxis
              dataKey="rps"
              label={{
                value: "API Rate Limit (RPS)",
                position: "insideBottom",
                offset: -5,
              }}
            />
            <YAxis
              label={{
                value: "Time to Drain (min)",
                angle: -90,
                position: "insideLeft",
              }}
            />
            <Tooltip formatter={(v) => `${Number(v).toFixed(2)} min`} />
            <Line
              type="monotone"
              dataKey="time"
              stroke="#8b5cf6"
              strokeWidth={2}
              dot={false}
            />
            <ReferenceLine
              x={scenario.apiRateLimit}
              stroke="#ef4444"
              strokeDasharray="3 3"
              label={{ value: "Current", position: "top" }}
            />
            <ReferenceLine
              y={scenario.maxCompletionTime}
              stroke="#f59e0b"
              strokeDasharray="3 3"
              label={{ value: "SLA", position: "right" }}
            />
          </LineChart>
        </ResponsiveContainer>
      </Section>

      <Section
        title="Queue Depth & In-Flight Requests Over Time"
        subtitle="Pending queue (orange) drops linearly while in-flight (purple) sits at the smaller of worker count or rate-limit-allowed concurrency."
      >
        <ResponsiveContainer width="100%" height={300}>
          <LineChart data={sim.queueData}>
            <CartesianGrid strokeDasharray="3 3" />
            <XAxis
              dataKey="time"
              label={{
                value: "Time (s)",
                position: "insideBottom",
                offset: -5,
              }}
            />
            <YAxis
              label={{ value: "Count", angle: -90, position: "insideLeft" }}
            />
            <Tooltip />
            <Legend />
            <Line
              type="monotone"
              dataKey="queueDepth"
              stroke="#f59e0b"
              name="Queue Depth"
              strokeWidth={2}
              dot={false}
            />
            <Line
              type="monotone"
              dataKey="inFlight"
              stroke="#8b5cf6"
              name="In-Flight"
              strokeWidth={2}
              dot={false}
            />
            <ReferenceLine
              y={scenario.workerCount}
              stroke="#ef4444"
              strokeDasharray="3 3"
              label="Worker Limit"
            />
          </LineChart>
        </ResponsiveContainer>
      </Section>

      <Section
        title="Achieved RPS Over Time"
        subtitle="Per-second start counts. Should sit at min(rate limit, workers/duration) once steady state is reached."
      >
        <ResponsiveContainer width="100%" height={300}>
          <BarChart data={sim.rpsData}>
            <CartesianGrid strokeDasharray="3 3" />
            <XAxis
              dataKey="time"
              label={{
                value: "Time (s)",
                position: "insideBottom",
                offset: -5,
              }}
            />
            <YAxis
              label={{ value: "RPS", angle: -90, position: "insideLeft" }}
            />
            <Tooltip />
            <Legend />
            <Bar dataKey="rps" fill="#3b82f6" name="RPS" />
            <ReferenceLine
              y={scenario.apiRateLimit}
              stroke="#ef4444"
              strokeDasharray="3 3"
              label="Rate Limit"
            />
          </BarChart>
        </ResponsiveContainer>
      </Section>

      {cvSweep && (
        <Section
          title="Variance Sensitivity (CV Sweep)"
          subtitle="How much does latency variability affect drain time? A flat curve means you're rate-limit-bound — the metronome paces starts regardless of per-request jitter, so total drain time barely moves with CV. An upward slope means you're worker-bound: slow requests hold worker slots, reducing effective throughput as variance grows."
        >
          <ResponsiveContainer width="100%" height={300}>
            <LineChart data={cvSweep}>
              <CartesianGrid strokeDasharray="3 3" />
              <XAxis
                dataKey="cv"
                type="number"
                domain={[0, "dataMax"]}
                label={{
                  value: "Latency CV (σ/μ)",
                  position: "insideBottom",
                  offset: -5,
                }}
              />
              <YAxis
                label={{
                  value: "Drain Time (min)",
                  angle: -90,
                  position: "insideLeft",
                }}
              />
              <Tooltip
                formatter={(v) => `${Number(v).toFixed(2)} min`}
                labelFormatter={(v) => `CV = ${v}`}
              />
              <Legend />
              <Line
                type="monotone"
                dataKey="mean"
                stroke="#3b82f6"
                name="Mean"
                strokeWidth={2}
                dot={{ r: 3 }}
              />
              <Line
                type="monotone"
                dataKey="p95"
                stroke="#8b5cf6"
                name="P95"
                strokeWidth={2}
                dot={{ r: 3 }}
              />
              <ReferenceLine
                x={scenario.latencyCV}
                stroke="#ef4444"
                strokeDasharray="3 3"
                label={{ value: "Current", position: "top" }}
              />
              <ReferenceLine
                y={scenario.maxCompletionTime}
                stroke="#f59e0b"
                strokeDasharray="3 3"
                label={{ value: "SLA", position: "right" }}
              />
            </LineChart>
          </ResponsiveContainer>
          {mc && (
            <div className="mt-4 grid grid-cols-5 gap-4 text-sm">
              <Stat label="Min" value={`${mc.min.toFixed(2)}m`} />
              <Stat label="P50" value={`${mc.p50.toFixed(2)}m`} />
              <Stat label="Mean" value={`${mc.mean.toFixed(2)}m`} />
              <Stat label="P95" value={`${mc.p95.toFixed(2)}m`} />
              <Stat label="P99" value={`${mc.p99.toFixed(2)}m`} />
            </div>
          )}
          {mc && (
            <p className="text-xs text-gray-500 mt-2 text-right">
              Stats above are at your current CV ({scenario.latencyCV}), from{" "}
              {monteCarloIterations} simulations.
            </p>
          )}
        </Section>
      )}
    </>
  );
};

const CompareModeResults = ({ scenarios, results }) => (
  <div className="bg-white rounded-lg shadow-lg p-6 mb-6">
    <h2 className="text-xl font-semibold mb-4">Scenario Comparison</h2>
    <div className="overflow-x-auto">
      <table className="w-full text-sm">
        <thead>
          <tr className="border-b-2 border-gray-300">
            <th className="text-left py-3 px-4">Scenario</th>
            <th className="text-right py-3 px-4">Queue</th>
            <th className="text-right py-3 px-4">Workers</th>
            <th className="text-right py-3 px-4">RPS Limit</th>
            <th className="text-right py-3 px-4">Time</th>
            <th className="text-right py-3 px-4">P95</th>
            <th className="text-right py-3 px-4">Utilization</th>
            <th className="text-right py-3 px-4">Bottleneck</th>
            <th className="text-center py-3 px-4">SLA</th>
          </tr>
        </thead>
        <tbody>
          {scenarios.map((scenario) => {
            const r = results[scenario.id];
            if (!r) return null;
            return (
              <tr
                key={scenario.id}
                className="border-b border-gray-200 hover:bg-gray-50"
              >
                <td className="py-3 px-4 font-semibold">{scenario.name}</td>
                <td className="text-right py-3 px-4">
                  {scenario.queueSize.toLocaleString()}
                </td>
                <td className="text-right py-3 px-4">{scenario.workerCount}</td>
                <td className="text-right py-3 px-4">
                  {scenario.apiRateLimit}
                </td>
                <td className="text-right py-3 px-4">
                  {r.sim.completionTime.toFixed(1)}m
                </td>
                <td className="text-right py-3 px-4">
                  {r.monteCarlo ? `${r.monteCarlo.p95.toFixed(1)}m` : "—"}
                </td>
                <td className="text-right py-3 px-4">
                  {fmtPct(r.analysis.utilization)}
                </td>
                <td className="text-right py-3 px-4 text-xs">
                  {r.analysis.bottleneck}
                </td>
                <td className="text-center py-3 px-4">
                  {r.sim.viable ? (
                    <span className="text-green-600">✓</span>
                  ) : (
                    <span className="text-red-600">✗</span>
                  )}
                </td>
              </tr>
            );
          })}
        </tbody>
      </table>
    </div>

    <div className="mt-6">
      <h3 className="text-lg font-semibold mb-4">Time to Drain Comparison</h3>
      <ResponsiveContainer width="100%" height={300}>
        <BarChart
          data={scenarios.map((s) => {
            const r = results[s.id];
            return {
              name: s.name,
              mean: r?.sim.completionTime || 0,
              p95: r?.monteCarlo?.p95 || 0,
            };
          })}
        >
          <CartesianGrid strokeDasharray="3 3" />
          <XAxis dataKey="name" />
          <YAxis
            label={{ value: "Minutes", angle: -90, position: "insideLeft" }}
          />
          <Tooltip />
          <Legend />
          <Bar dataKey="mean" fill="#3b82f6" name="Mean" />
          <Bar dataKey="p95" fill="#8b5cf6" name="P95" />
          <ReferenceLine
            y={scenarios[0]?.maxCompletionTime || 60}
            stroke="#ef4444"
            strokeDasharray="3 3"
            label="SLA"
          />
        </BarChart>
      </ResponsiveContainer>
    </div>
  </div>
);

export default QueueAnalyzer;

// Auto-mount when loaded into a page that has a #root element. Lets this file
// double as both an importable component and a self-bootstrapping entry.
const rootEl =
  typeof document !== "undefined" ? document.getElementById("root") : null;
if (rootEl) createRoot(rootEl).render(<QueueAnalyzer />);
