/* Equiti+ Voice Overlay v3 — gpt-realtime-2 + marin
 * ===================================================
 * 5-state hybrid surface (driven by `mode` prop):
 *
 *   'idle'      — not mounted (returns null). FAB-only screen.
 *   'decision'  — FAB in its bottom-center anchor at 110×110 (replica
 *                 of voiceMini); a static low-opacity glow halo
 *                 frames it. A text-input pill transitions in on
 *                 the LEFT, and a "↶ or press here to talk" hint
 *                 transitions in on the RIGHT. App stays
 *                 navigable — no scrim, no panel. The user picks
 *                 chat (tap text box) or voice (tap FAB).
 *   'chat'      — chat panel slides up from the very bottom (cash-
 *                 flow-sheet design language: white sheet with
 *                 24px top radius, drag handle, scrim). FAB
 *                 simultaneously animates from bottom-center to
 *                 bottom-RIGHT corner at 76×76. Inside the panel:
 *                 header → transcript → iOS-style chat input pill
 *                 at the bottom.
 *   'voice'     — same panel as chat. FAB lifts up to panel centre
 *                 at 110×110. Glow rings pulse only while
 *                 transcription is happening. X next to FAB.
 *                 Listening caption near panel bottom.
 *   'voiceMini' — voice still running, panel collapsed. FAB at
 *                 ORIGINAL bottom-center anchor (centre at 88px
 *                 from frame bottom — exactly where the idle FAB
 *                 sat), 110×110, X next to it, glow centred on
 *                 FAB. App fully navigable.
 *
 * The FAB itself is owned by VoiceFAB. This file paints all the
 * surrounding affordances (panel, scrim, text bar, voice hint, X,
 * glow) and consumes the realtime client's transcript stream. */

(function (global) {
  'use strict';

  /* React is consumed as a page global (script-tag build, no bundler);
     bail out of the whole module if it hasn't loaded yet. */
  var React = global.React;
  if (!React) {
    console.error('[VoiceOverlay] React not loaded yet');
    return;
  }
  /* Local aliases for the hooks used throughout this file. */
  var useState = React.useState;
  var useEffect = React.useEffect;
  var useRef = React.useRef;
  var useCallback = React.useCallback;

  /* Shared easing curve for every animated surface in this overlay. */
  var EASE = 'cubic-bezier(0.32, 0.72, 0.24, 1)';
  /* Sheet animation tempo — 800ms is "very slow + extremely smooth"
     per the user's brief. Same easing as cash-flow-sheet so the
     deposit/withdraw popups and this voice surface share a vocab. */
  var ANIM_MS = 800;
  /* Faster fade for the smaller decoration elements (text bar,
     voice hint, glow) so they don't lag the panel. */
  var DECORATION_MS = 360;

  /* FAB centre coordinates, mirrored from voice-fab.jsx anchors.
     Values are px measured from the frame bottom to the FAB centre. */
  var FAB_CENTRE_BOTTOM_Y = 88;       /* idle / decision / voiceMini */
  var FAB_CENTRE_PANEL_Y = 380;       /* voice mode (panel-centre) */
  /* ── Inject keyframes once. */
  if (typeof document !== 'undefined' && !document.getElementById('voice-overlay-v3-keyframes')) {
    var style = document.createElement('style');
    style.id = 'voice-overlay-v3-keyframes';
    style.textContent = [
      '@keyframes voice-glow-1 { 0% { transform: scale(1.05); opacity: 0.55; } 50% { transform: scale(1.20); opacity: 0.30; } 100% { transform: scale(1.05); opacity: 0.55; } }',
      '@keyframes voice-glow-2 { 0% { transform: scale(1.05); opacity: 0.55; } 50% { transform: scale(1.22); opacity: 0.28; } 100% { transform: scale(1.05); opacity: 0.55; } }',
      '@keyframes voice-glow-3 { 0% { transform: scale(1.05); opacity: 0.50; } 50% { transform: scale(1.25); opacity: 0.22; } 100% { transform: scale(1.05); opacity: 0.50; } }',
      '@keyframes voice-live-dot { 0%, 100% { transform: scale(1); opacity: 1; } 50% { transform: scale(1.2); opacity: 0.7; } }',
      '@keyframes voice-hint-bob { 0%, 100% { transform: translateX(0); } 50% { transform: translateX(-4px); } }',
      /* Transitions.dev text-swap: text drifts in from below with a
         slight blur + opacity sweep. Used for chat bubble text on first
         appearance so messages slide cleanly in (faster + crisper than
         per-word stagger). */
      ':root { --text-swap-dur: 200ms; --text-swap-translate-y: 8px; --text-swap-blur: 2px; --text-swap-ease: ease-out; }',
      '.t-text-swap { display: inline-block; transform: translateY(0); filter: blur(0); opacity: 1; transition: transform var(--text-swap-dur) var(--text-swap-ease), filter var(--text-swap-dur) var(--text-swap-ease), opacity var(--text-swap-dur) var(--text-swap-ease); will-change: transform, filter, opacity; }',
      '.t-text-swap.is-exit { transform: translateY(calc(var(--text-swap-translate-y) * -1)); filter: blur(var(--text-swap-blur)); opacity: 0; }',
      '.t-text-swap.is-enter-start { transform: translateY(var(--text-swap-translate-y)); filter: blur(var(--text-swap-blur)); opacity: 0; transition: none; }',
      '@media (prefers-reduced-motion: reduce) { .t-text-swap { transition: none !important; } }',
    ].join('\n');
    document.head.appendChild(style);
  }

  /* BubbleText — text-swap ENTER phase only for new chat bubbles. Mounts
     with `is-enter-start` applied (offset + blur + opacity 0, no
     transition), then drops the class on the next frame so the default
     transition runs the text back to rest. Streaming partials update
     the text prop in place without re-animating. */
  function BubbleText(props) {
    var text = props.text || '';
    var enteredState = useState(false);
    var entered = enteredState[0], setEntered = enteredState[1];
    useEffect(function () {
      if (entered) return;
      var raf = requestAnimationFrame(function () { setEntered(true); });
      return function () { cancelAnimationFrame(raf); };
    }, [entered]);
    return (
      <span className={'t-text-swap' + (entered ? '' : ' is-enter-start')}>{text}</span>
    );
  }

  /* ── useRealtimeSession — owns the realtime client lifecycle, the
     local transcript, and a debounced "transcription activity" flag.
     Returns:
       status         — 'idle' until the client emits its own status
       error          — last error string (client error or missing token URL)
       messages       — [{ id, role, text, final }] local transcript
       isTranscribing — true while transcript events arrived < 600ms ago
       start()        — lazily create the client + connect (Promise)
       stop()         — tear down the client connection (Promise)
       sendText(t)    — forward a typed message, if the client supports it
     The client is created on first start() and stopped on unmount. */
  function useRealtimeSession() {
    var clientRef = useRef(null);
    var statusState = useState('idle');
    var status = statusState[0], setStatus = statusState[1];
    var errorState = useState(null);
    var error = errorState[0], setError = errorState[1];
    var messagesState = useState([]);
    var messages = messagesState[0], setMessages = messagesState[1];
    var transcribingState = useState(false);
    var isTranscribing = transcribingState[0], setIsTranscribing = transcribingState[1];
    var transcribingTimerRef = useRef(null);

    /* Each transcript event re-arms a 600ms timer; the flag drops when
       events stop arriving. Drives the glow-ring pulsing. */
    var markActivity = useCallback(function () {
      setIsTranscribing(true);
      if (transcribingTimerRef.current) clearTimeout(transcribingTimerRef.current);
      transcribingTimerRef.current = setTimeout(function () {
        setIsTranscribing(false);
        transcribingTimerRef.current = null;
      }, 600);
    }, []);

    /* Transcript event handler: user events are complete utterances and
       append a new final message; assistant events stream, so deltas are
       folded into the trailing non-final assistant bubble. */
    var pushTranscript = useCallback(function (ev) {
      markActivity();
      var role = ev.role;
      var text = ev.text;
      var final = !!ev.final;
      setMessages(function (msgs) {
        var copy = msgs.slice();
        if (role === 'user') {
          copy.push({ id: 'u-' + Date.now() + '-' + Math.random().toString(36).slice(2, 6), role: 'user', text: text, final: true });
          return copy;
        }
        var last = copy[copy.length - 1];
        if (last && last.role === 'assistant' && !last.final) {
          /* Build a fresh message object rather than mutating `last`:
             slice() is shallow, so `last` is still owned by the PREVIOUS
             state array, and mutating it in place would leak the edit
             into old state snapshots that memoized consumers compare
             against. */
          copy[copy.length - 1] = Object.assign({}, last, {
            text: final ? text : (last.text + text),
            final: final,
          });
          return copy;
        }
        copy.push({ id: 'a-' + Date.now() + '-' + Math.random().toString(36).slice(2, 6), role: 'assistant', text: text, final: final });
        return copy;
      });

      /* Mirror finals to the coordination conversation so the operator
         on the dashboard sees the same thread. User events from the
         realtime client are already complete utterances (the local
         transcript state treats every user event as final); assistant
         events stream, so only the final chunk is mirrored. Skipped
         entirely during agent_control — AI is silent in that mode. */
      var conv = global.equiti && global.equiti.conversation;
      if (!conv || conv.mode === 'agent_control') return;
      if (!text || !text.trim()) return;
      if (role === 'user') {
        conv.appendMessage('user', text, { via: 'voice' });
      } else if (role === 'assistant' && final) {
        conv.appendMessage('assistant', text, { via: 'voice' });
      }
    }, [markActivity]);

    /* Lazily create (and cache) the realtime client. Throws when the
       token endpoint is unconfigured — start() converts that into a
       surfaced error and a resolved promise. */
    var ensureClient = function () {
      if (clientRef.current) return clientRef.current;
      var tokenUrl = global.EQUITI_REALTIME_TOKEN_URL;
      if (!tokenUrl) {
        var msg = 'window.EQUITI_REALTIME_TOKEN_URL is not set';
        setError(msg);
        throw new Error(msg);
      }
      /* Tool dispatchers live in voice/voice-tools.jsx as a flat registry
         on window.VOICE_TOOL_DISPATCHERS — adding a new tool means adding
         a function there + a schema entry in mcp-server/src/realtime.ts.
         Voice-overlay no longer hardcodes any tool logic itself. */
      var clientTools = global.VOICE_TOOL_DISPATCHERS || {};
      var client = global.EquitiRealtimeClient.create({ tokenEndpoint: tokenUrl, clientTools: clientTools });
      client.on('status', function (s) { setStatus(s); });
      client.on('error', function (m) { setError(m); });
      client.on('transcript', pushTranscript);
      clientRef.current = client;
      return client;
    };

    var start = useCallback(function () {
      setError(null);
      /* ensureClient only throws for the missing-token-URL case, which
         it has already surfaced via setError — swallow and resolve. */
      try { var client = ensureClient(); return client.start(); }
      catch (e) { return Promise.resolve(); }
    }, []);
    var stop = useCallback(function () {
      if (!clientRef.current) return Promise.resolve();
      return clientRef.current.stop();
    }, []);
    var sendText = useCallback(function (text) {
      if (!clientRef.current || typeof clientRef.current.sendText !== 'function') return false;
      return clientRef.current.sendText(text);
    }, []);

    /* Unmount cleanup: clear the activity timer and stop the client
       (best-effort; stop() failures are intentionally ignored). */
    useEffect(function () {
      return function () {
        if (transcribingTimerRef.current) clearTimeout(transcribingTimerRef.current);
        if (clientRef.current) {
          clientRef.current.stop().catch(function () {});
          clientRef.current = null;
        }
      };
    }, []);

    return { status: status, error: error, messages: messages, isTranscribing: isTranscribing, start: start, stop: stop, sendText: sendText };
  }

  /* ── VoiceGlow — three concentric pulsing rings centred on the FAB.
     Modes:
       decision  → static halo at 0.18 opacity (no animation)
       voice / voiceMini  → pulses while isTranscribing, otherwise
                            also static at 0.18 opacity. */
  function VoiceGlow(props) {
    var visible = props.visible;
    var anchorBottom = props.anchorBottom;
    var pulsing = props.pulsing;     /* false in decision; voice/voiceMini follow isTranscribing */
    var staticHalo = props.staticHalo; /* true → render at low opacity even without pulsing */
    /* Delay baked into the animation shorthand (4th value, after the
       timing function, before iteration-count). Previously we set
       `animation: '... infinite'` AND `animationDelay: '0.4s'` on the
       same element — React 18+ warns about shorthand-vs-longhand
       collisions on every render. That warning was firing hundreds of
       times per voice session because every transcript delta triggers
       a re-render. Putting the delay in the shorthand kills the warning
       without changing any behaviour. */
    var rings = [
      { size: 220, anim: 'voice-glow-1 1.6s ease-in-out 0s infinite' },
      { size: 260, anim: 'voice-glow-2 1.4s ease-in-out 0.4s infinite' },
      { size: 300, anim: 'voice-glow-3 1.8s ease-in-out 0.8s infinite' },
    ];
    var baseOpacity = pulsing ? 1 : (staticHalo ? 0.18 : 0);
    return (
      <React.Fragment>
        {rings.map(function (r, i) {
          var half = r.size / 2;
          return (
            <div key={i} style={{
              position: 'absolute',
              left: '50%',
              bottom: anchorBottom + 'px',
              width: r.size + 'px',
              height: r.size + 'px',
              marginLeft: -half + 'px',
              marginBottom: -half + 'px',
              borderRadius: '50%',
              pointerEvents: 'none',
              background: 'radial-gradient(circle, rgba(0,175,171,0.7) 0%, rgba(0,175,171,0) 70%)',
              opacity: visible ? baseOpacity : 0,
              animation: pulsing && visible ? r.anim : 'none',
              transition: 'opacity ' + DECORATION_MS + 'ms ' + EASE + ', bottom ' + ANIM_MS + 'ms ' + EASE,
              zIndex: 9050,
            }} />
          );
        })}
      </React.Fragment>
    );
  }

  /* ── CancelX — dismiss the voice session entirely. Tracks FAB centre. */
  function CancelX(props) {
    var visible = props.visible;
    var anchorBottom = props.anchorBottom;
    return (
      <button
        onClick={function (e) { e.stopPropagation(); props.onEnd(); }}
        aria-label="End voice"
        style={{
          position: 'absolute',
          left: 'calc(50% + 70px)',
          bottom: (anchorBottom - 18) + 'px',
          width: '36px', height: '36px',
          borderRadius: '18px',
          border: 'none',
          background: '#FFFFFF',
          color: '#0E1420',
          fontFamily: 'Gilroy, system-ui, sans-serif',
          fontWeight: 700,
          fontSize: '16px',
          cursor: 'pointer',
          boxShadow: '0 6px 18px rgba(14,20,32,0.20), 0 0 0 1px rgba(14,20,32,0.06)',
          opacity: visible ? 1 : 0,
          transform: visible ? 'translateY(0)' : 'translateY(8px)',
          transition: 'opacity 220ms ' + EASE + ', transform 220ms ' + EASE + ', bottom ' + ANIM_MS + 'ms ' + EASE,
          pointerEvents: visible ? 'auto' : 'none',
          zIndex: 9300,
          padding: 0,
          display: 'flex',
          alignItems: 'center',
          justifyContent: 'center',
          lineHeight: 1,
        }}
      >×</button>
    );
  }

  /* ── DecisionTextBar — the read-only "Type…" pill on the LEFT of
     the FAB in the decision state. Tapping it commits to chat. */
  function DecisionTextBar(props) {
    var visible = props.visible;
    var onActivate = props.onActivate;
    return (
      <button
        onClick={function (e) { e.stopPropagation(); onActivate(); }}
        aria-label="Open chat"
        style={{
          position: 'absolute',
          left: '20px',
          bottom: '66px',                    /* centred at y=88 (44px tall) */
          width: '140px',
          height: '44px',
          display: 'flex',
          alignItems: 'center',
          gap: 8,
          background: '#FFFFFF',
          border: '1px solid rgba(14,20,32,0.08)',
          borderRadius: '22px',
          paddingLeft: 16,
          paddingRight: 16,
          boxShadow: '0px 1px 2px rgba(14, 20, 32, 0.06), 0 4px 14px rgba(14,20,32,0.06)',
          opacity: visible ? 1 : 0,
          transform: visible ? 'translateX(0)' : 'translateX(-12px)',
          transition: 'opacity ' + DECORATION_MS + 'ms ' + EASE + ', transform ' + DECORATION_MS + 'ms ' + EASE,
          pointerEvents: visible ? 'auto' : 'none',
          zIndex: 9100,
          cursor: 'pointer',
          font: 'inherit',
          margin: 0,
        }}
      >
        <span style={{
          fontFamily: 'Inter, system-ui, sans-serif',
          fontSize: 13,
          color: 'rgba(14,20,32,0.55)',
          fontWeight: 400,
        }}>Type…</span>
      </button>
    );
  }

  /* ── VoiceHintBar — mirror of DecisionTextBar on the RIGHT side of
     the FAB. Same width/height/radius/border/shadow as the "Type…"
     pill so the two read as a paired set across the FAB. Tapping
     it commits to voice (same effect as tapping the FAB). Pill
     stops just past the FAB ring so it doesn't visually collide. */
  function VoiceHintBar(props) {
    var visible = props.visible;
    var onActivateVoice = props.onActivateVoice;
    return (
      <button
        onClick={function (e) { e.stopPropagation(); onActivateVoice(); }}
        aria-label="Press to talk"
        style={{
          position: 'absolute',
          right: '20px',
          bottom: '66px',
          width: '140px',
          height: '44px',
          display: 'flex',
          alignItems: 'center',
          justifyContent: 'center',
          gap: 6,
          background: '#FFFFFF',
          border: '1px solid rgba(14,20,32,0.08)',
          borderRadius: '22px',
          paddingLeft: 14,
          paddingRight: 14,
          boxShadow: '0px 1px 2px rgba(14, 20, 32, 0.06), 0 4px 14px rgba(14,20,32,0.06)',
          opacity: visible ? 1 : 0,
          transform: visible ? 'translateX(0)' : 'translateX(12px)',
          transition: 'opacity ' + DECORATION_MS + 'ms ' + EASE + ', transform ' + DECORATION_MS + 'ms ' + EASE,
          pointerEvents: visible ? 'auto' : 'none',
          zIndex: 9100,
          cursor: 'pointer',
          font: 'inherit',
          margin: 0,
        }}
      >
        <svg width="14" height="14" viewBox="0 0 24 24" fill="none" style={{ flexShrink: 0 }}>
          <rect x="9" y="2" width="6" height="12" rx="3" stroke="#00AFAB" strokeWidth="2"/>
          <path d="M5 12a7 7 0 0014 0" stroke="#00AFAB" strokeWidth="2" strokeLinecap="round"/>
          <path d="M12 19v3" stroke="#00AFAB" strokeWidth="2" strokeLinecap="round"/>
        </svg>
        <span style={{
          fontFamily: 'Inter, system-ui, sans-serif',
          fontSize: 12,
          fontWeight: 500,
          color: '#00AFAB',
          letterSpacing: 0.2,
          whiteSpace: 'nowrap',
        }}>Press to talk</span>
      </button>
    );
  }

  /* ── ChatPanel — bottom sheet matching cash-flow-sheet.jsx exactly,
     scaled to 800ms tempo. Scrim backdrop + inner sheet sliding up
     from translateY(100%). The FAB floats as a translucent background
     orb at the panel centre (z 9200, 22% opacity); the chat input
     pill spans the full width at the bottom of the panel. */
  function ChatPanel(props) {
    var visible = props.visible;
    var fadeIn = props.fadeIn;
    var voiceMode = props.voiceMode;
    var session = props.session;
    var transcriptRef = useRef(null);
    useEffect(function () {
      var el = transcriptRef.current;
      if (el) el.scrollTop = el.scrollHeight;
    }, [session.messages.length]);

    var bottomPad = 96;
    var showChatInput = voiceMode === 'chat';

    /* Panel split: chrome (sheet bg + handle + header) at z 9001 and the
       content overlay (banner + transcript + input + caption) at z 9300,
       above the FAB at z 9200. This is what lets transcript bubbles
       visually layer ON TOP of the background-mode FAB rather than the
       other way around. Both layers share an identical transform and
       transition so they rise as one. */
    var HEADER_AREA_PX = 76; /* drag handle (16) + PanelHeader (~60) */
    var sheetGeometry = {
      position: 'absolute',
      left: 0, right: 0, bottom: 0,
      maxHeight: '90%',
      height: '760px',
      borderRadius: '24px 24px 0 0',
      overflow: 'hidden',
      transform: visible && fadeIn ? 'translateY(0)' : 'translateY(100%)',
      transition: 'transform ' + ANIM_MS + 'ms ' + EASE,
    };

    return (
      <React.Fragment>
        <div
          onClick={props.onScrimTap}
          style={{
            position: 'absolute',
            inset: 0,
            background: 'rgba(14,20,32,0.55)',
            opacity: fadeIn && visible ? 1 : 0,
            transition: 'opacity ' + ANIM_MS + 'ms ' + EASE,
            pointerEvents: visible ? 'auto' : 'none',
            zIndex: 9000,
          }}
        />
        {/* Sheet chrome — white bg + drag handle + header. z 9001. */}
        <div
          onClick={function (e) { e.stopPropagation(); }}
          style={Object.assign({}, sheetGeometry, {
            backgroundColor: '#FFFFFF',
            backgroundImage: 'radial-gradient(ellipse 120% 70% at 50% 0%, rgba(0,175,171,0.08) 0%, transparent 70%), linear-gradient(180deg, #F4FAF9 0%, #FFFFFF 60%)',
            boxShadow: '0px -12px 40px rgba(14, 20, 32, 0.24)',
            zIndex: 9001,
            display: 'flex',
            flexDirection: 'column',
          })}
        >
          <div style={{ display: 'flex', justifyContent: 'center', paddingTop: 8, paddingBottom: 4, flexShrink: 0 }}>
            <div style={{ width: 60, height: 4, borderRadius: 2, backgroundColor: 'rgba(14,20,32,0.10)' }} />
          </div>
          <PanelHeader session={session} onCollapse={props.onCollapse} onEndChat={props.onEndChat} />
        </div>
        {/* Content overlay — sits ABOVE the FAB (z 9200). Transparent so
           the gem-smoke shows through where there's no bubble. Wrapper
           is pointer-events:none so clicks fall through empty areas to
           the underlying chrome/scrim; interactive children opt back in. */}
        <div
          style={Object.assign({}, sheetGeometry, {
            backgroundColor: 'transparent',
            zIndex: 9300,
            pointerEvents: 'none',
            display: 'flex',
            flexDirection: 'column',
          })}
        >
          <div style={{ height: HEADER_AREA_PX, flexShrink: 0 }} />
          {props.agentControl ? (
            <div style={{
              margin: '0 16px 8px',
              padding: '8px 12px',
              background: 'rgba(245,158,11,0.12)',
              border: '1px solid rgba(245,158,11,0.35)',
              borderRadius: 10,
              fontFamily: 'Inter, system-ui, sans-serif',
              fontSize: 12,
              color: '#92400E',
              flexShrink: 0,
              pointerEvents: 'auto',
            }}>An agent has joined the chat. The AI is paused.</div>
          ) : null}
          <Transcript transcriptRef={transcriptRef} session={session} bottomPad={bottomPad} />
          {showChatInput ? (
            <ChatInputPill session={session} fadeIn={fadeIn} />
          ) : null}
          {voiceMode === 'voice' ? (
            <div style={{
              position: 'absolute',
              left: 0, right: 0,
              /* 28px + env(safe-area-inset-bottom) so 'Listening' sits
                 above the iOS home indicator on PWA. */
              bottom: 'calc(28px + env(safe-area-inset-bottom, 0px))',
              textAlign: 'center',
              fontFamily: 'Inter, system-ui, sans-serif',
              fontSize: 11,
              color: 'rgba(14,20,32,0.55)',
              letterSpacing: 0.3,
              pointerEvents: 'none',
            }}>Listening</div>
          ) : null}
        </div>
      </React.Fragment>
    );
  }

  function PanelHeader(props) {
    var session = props.session;
    var connected = session.status === 'connected';
    var connecting = session.status === 'connecting';
    var dotColor = connected ? '#00AFAB' : connecting ? '#F59E0B' : '#9CA3AF';
    var labelText = connecting ? 'Connecting…' : connected ? 'Live' : (session.error ? 'Offline' : 'Ready');
    return (
      <div style={{
        flexShrink: 0,
        paddingLeft: 20,
        paddingRight: 20,
        paddingTop: 6,
        paddingBottom: 14,
        display: 'flex',
        flexDirection: 'row',
        alignItems: 'center',
        justifyContent: 'space-between',
        borderBottom: '1px solid rgba(14,20,32,0.06)',
      }}>
        <div>
          <div style={{ fontFamily: 'Gilroy, system-ui, sans-serif', fontWeight: 700, fontSize: 20, color: '#0E1420' }}>Equiti+ Assistant</div>
          <div style={{ display: 'flex', alignItems: 'center', gap: 6, marginTop: 2 }}>
            <span style={{
              width: 7, height: 7, borderRadius: 4,
              background: dotColor,
              boxShadow: connected ? '0 0 6px rgba(0,175,171,0.6)' : 'none',
              animation: connected ? 'voice-live-dot 1.6s ease-in-out infinite' : 'none',
            }} />
            <span style={{
              fontFamily: 'Inter, system-ui, sans-serif',
              fontSize: 11,
              color: connected ? '#00AFAB' : connecting ? '#F59E0B' : '#6B7280',
              fontWeight: 500,
              letterSpacing: 0.2,
            }}>{labelText}</span>
          </div>
        </div>
        <div style={{ display: 'flex', alignItems: 'center', gap: 8 }}>
          {/* End chat — resolves the conversation server-side (drops it
             from the dashboard's escalation queue) and closes the panel.
             Distinct from the close glyph below, which just minimises. */}
          <button
            onClick={props.onEndChat}
            aria-label="End chat"
            style={{
              border: '1px solid rgba(14,20,32,0.08)',
              background: '#FFFFFF',
              padding: '6px 10px',
              margin: 0,
              cursor: 'pointer',
              borderRadius: 14,
              fontFamily: 'Inter, system-ui, sans-serif',
              fontSize: 11,
              fontWeight: 600,
              color: '#0E1420',
              letterSpacing: 0.2,
            }}
          >End chat</button>
          <button
            onClick={props.onCollapse}
            aria-label="Close"
            style={{
              border: 'none',
              background: '#F2F4F7',
              padding: 0, margin: 0,
              cursor: 'pointer',
              width: 32, height: 32,
              borderRadius: 16,
              display: 'flex', alignItems: 'center', justifyContent: 'center',
            }}
          >
            <svg width="18" height="18" viewBox="0 0 24 24" fill="none">
              <path d="M6 6l12 12M18 6L6 18" stroke="#6B7280" strokeWidth="2" strokeLinecap="round"/>
            </svg>
          </button>
        </div>
      </div>
    );
  }

  function Transcript(props) {
    var session = props.session;
    var bottomPad = props.bottomPad;
    var conv = window.useConversation ? window.useConversation() : null;

    /* Merge local realtime transcript with conversation-store messages.
       Conv messages dedupe against session.messages on (role, text) so
       a final transcript we mirrored to the store doesn't show twice.
       Operator messages (role='agent') only ever live in the conv store,
       so they always render. */
    var unified = [];
    var seen = new Set();
    for (var i = 0; i < session.messages.length; i++) {
      var m = session.messages[i];
      unified.push({ id: m.id, role: m.role, text: m.text, final: m.final });
      seen.add(m.role + '|' + (m.text || '').trim());
    }
    if (conv && conv.messages) {
      for (var j = 0; j < conv.messages.length; j++) {
        var cm = conv.messages[j];
        var key = cm.role + '|' + (cm.content || '').trim();
        if (seen.has(key)) continue;
        unified.push({ id: cm.id, role: cm.role, text: cm.content, final: true, agentName: cm.agentName });
      }
    }

    return (
      <div ref={props.transcriptRef} style={{
        flex: 1,
        overflowY: 'auto',
        padding: '14px 20px ' + bottomPad + 'px',
        display: 'flex', flexDirection: 'column', gap: 10,
        minHeight: 0,
        position: 'relative',
        zIndex: 2,
        pointerEvents: 'auto',
      }}>
        {unified.map(function (m) {
          var isUser = m.role === 'user';
          var isAgent = m.role === 'agent';
          /* Glass recipe — semi-transparent backgrounds with backdrop
             blur, matching the bottom-nav pill's frosted look. The teal
             halo for user / yellow tint for agent / neutral for AI stays
             but at ~65% opacity so the FAB watermark behind reads through. */
          var bg = isUser
            ? 'rgba(0,175,171,0.62)'
            : isAgent
              ? 'rgba(254,246,231,0.78)'
              : 'rgba(255,255,255,0.72)';
          var color = isUser ? '#FFFFFF' : '#0E1420';
          var border = isAgent
            ? '1px solid rgba(245,158,11,0.45)'
            : (isUser
                ? '1px solid rgba(255,255,255,0.28)'
                : '1px solid rgba(0,175,171,0.22)');
          return (
            <div key={m.id} style={{
              alignSelf: isUser ? 'flex-end' : 'flex-start',
              maxWidth: '78%',
              background: bg,
              backdropFilter: 'blur(8px) saturate(140%)',
              WebkitBackdropFilter: 'blur(8px) saturate(140%)',
              color: color,
              padding: '10px 14px',
              borderRadius: isUser ? '14px 14px 4px 14px' : '14px 14px 14px 4px',
              fontFamily: 'Inter, system-ui, sans-serif',
              fontSize: 13,
              lineHeight: 1.45,
              border: border,
              boxShadow: isUser
                ? '0 4px 14px rgba(0,175,171,0.22), inset 0 1px 0 rgba(255,255,255,0.30)'
                : '0 2px 8px rgba(14,20,32,0.06), inset 0 1px 0 rgba(255,255,255,0.60)',
            }}>
              {isAgent ? (
                <div style={{ fontSize: 10, fontWeight: 600, color: '#92400E', marginBottom: 2 }}>
                  {m.agentName || 'Agent'}
                </div>
              ) : null}
              <BubbleText text={m.text || ''} />
              {!m.final && !isUser ? <span style={{ opacity: 0.5 }}> ▍</span> : ''}
            </div>
          );
        })}
      </div>
    );
  }

  /* ── ChatInputPill — iOS Messages-style composer spanning the full
     width at the bottom of the panel. Fades in from left after the
     panel-rise begins (240ms delayed). */
  function ChatInputPill(props) {
    var session = props.session;
    var fadeIn = props.fadeIn;
    var inputState = useState('');
    var input = inputState[0], setInput = inputState[1];
    var inputRef = useRef(null);

    function handleSend(e) {
      if (e && e.preventDefault) e.preventDefault();
      var text = input.trim();
      if (!text) return;
      setInput('');
      var conv = window.equiti && window.equiti.conversation;
      if (!conv) return;
      /* During agent_control the AI is silent; the operator is driving.
         Just mirror the user's message to the conversation. */
      if (conv.mode === 'agent_control') {
        conv.appendMessage('user', text, { via: 'typed' });
        return;
      }
      /* HTTP chat completion path: server appends both the user message
         and the AI reply, so the conversation poll picks both up within
         ~2s. No mic / WebRTC required. */
      conv.sendChat(text);
    }

    return (
      <form
        onSubmit={handleSend}
        onClick={function (e) { e.stopPropagation(); }}
        style={{
          position: 'absolute',
          left: 16, right: 16,
          /* 18px + env(safe-area-inset-bottom) so the chat input pill
             sits above the iOS home indicator on PWA. */
          bottom: 'calc(18px + env(safe-area-inset-bottom, 0px))',
          height: 48,
          display: 'flex',
          alignItems: 'center',
          background: '#FFFFFF',
          border: '1px solid rgba(14,20,32,0.08)',
          borderRadius: 24,
          paddingLeft: 16,
          paddingRight: 4,
          boxShadow: '0px 1px 2px rgba(14,20,32,0.06), 0 6px 18px rgba(14,20,32,0.08)',
          zIndex: 3,
          opacity: fadeIn ? 1 : 0,
          transform: fadeIn ? 'translateX(0)' : 'translateX(-20px)',
          transition: 'opacity 240ms ' + EASE + ' 320ms, transform 240ms ' + EASE + ' 320ms',
          pointerEvents: 'auto',
        }}
      >
        <input
          ref={inputRef}
          value={input}
          onChange={function (e) { setInput(e.target.value); }}
          placeholder="Message…"
          style={{
            flex: 1,
            minWidth: 0,
            border: 'none',
            background: 'transparent',
            outline: 'none',
            color: '#0E1420',
            fontFamily: 'Inter, system-ui, sans-serif',
            fontSize: 14,
            padding: 0,
          }}
        />
        <button type="submit" aria-label="Send" style={{
          border: 'none',
          background: input.trim()
            ? 'linear-gradient(135deg, #00C9C5 0%, #00AFAB 100%)'
            : '#E4E7EC',
          color: '#fff',
          width: 36, height: 36,
          borderRadius: 18,
          cursor: input.trim() ? 'pointer' : 'default',
          padding: 0,
          display: 'flex', alignItems: 'center', justifyContent: 'center',
          boxShadow: input.trim() ? '0 4px 12px rgba(0,175,171,0.30)' : 'none',
          flexShrink: 0,
          transition: 'background 200ms ' + EASE + ', box-shadow 200ms ' + EASE,
        }}>
          <svg width="16" height="16" viewBox="0 0 24 24" fill="none">
            <path d="M5 12h14M13 5l7 7-7 7" stroke="currentColor" strokeWidth="2.4" strokeLinecap="round" strokeLinejoin="round"/>
          </svg>
        </button>
      </form>
    );
  }

  /* ── VoiceOverlay — orchestrator.
   *
   * Props:
   *   mode       — 'idle' | 'decision' | 'chat' | 'voice' | 'voiceMini'
   *                (see the state table in the file header). This component
   *                never mutates mode itself; it only requests transitions.
   *   onSetMode  — callback(nextMode): requests a mode transition.
   *   onEnd      — callback(): hard exit; wired to CancelX and the panel's
   *                End-chat control.
   *   suppressed — truthy while a modal popup covers the screen; hides the
   *                glow, CancelX, and decision pills (panel/scrim unaffected).
   *
   * External surfaces consumed (as used below):
   *   session (useRealtimeSession) — .status, .start(), .stop(),
   *                                  .isTranscribing
   *   conv (window.useConversation, optional) — .mode, .conversationId,
   *       .messages, .ensureConversation(), .sendGreeting(), .resolve()
   *
   * Returns null until first entry into a non-idle mode, then stays
   * mounted through the fade-out so exit transitions can play. */
  function VoiceOverlay(props) {
    var mode = props.mode;
    var onSetMode = props.onSetMode;
    var onEnd = props.onEnd;
    /* `suppressed` is set by App when a popup (cash-flow sheet,
       EquitiPlusModal) is open — hides glow, CancelX, and decision
       pills so they don't render on top of the modal. */
    var suppressed = !!props.suppressed;
    var session = useRealtimeSession();
    /* Conversation store is optional — SuperApp may load this overlay
       before conversation-store.jsx; null-guard every access. */
    var conv = window.useConversation ? window.useConversation() : null;
    var convMode = conv ? conv.mode : 'ai_control';

    /* Mode predicates — derived once per render for readability. */
    var isPanelMode = mode === 'chat' || mode === 'voice';
    var isDecision = mode === 'decision';
    var isChat = mode === 'chat';
    var isVoice = mode === 'voice';
    var isVoiceMini = mode === 'voiceMini';

    /* Lazy conversation creation: on first entry to chat/voice the
       SuperApp tells the coordination server it exists. Idempotent —
       subsequent calls share the in-flight promise. */
    useEffect(function () {
      if (mode === 'chat' || mode === 'voice') {
        if (conv && conv.ensureConversation) {
          conv.ensureConversation();
        }
      }
    }, [mode]);

    /* Client-triggered chat greeting. The voice path already greets via
       response.create on data-channel open (see realtime-client.js); the
       typed-chat path has no such event, so we fire it manually here.
       Conditions: chat mode is open AND we haven't already greeted this
       conversation AND no messages exist yet (so voice-first → chat
       transitions don't trigger a second greeting after the voice intro).
       Reset on conversationId change so a fresh conversation re-greets. */
    var greetingFiredRef = useRef(null);
    useEffect(function () {
      if (!conv) return;
      if (mode !== 'chat') return;
      var convId = conv.conversationId;
      if (!convId) return;
      /* Ref (not state) — firing the greeting must not cause a render,
         and the guard must survive re-renders within the same convId. */
      if (greetingFiredRef.current === convId) return;
      if (conv.messages && conv.messages.length > 0) return;
      greetingFiredRef.current = convId;
      if (conv.sendGreeting) conv.sendGreeting();
    }, [mode, conv && conv.conversationId, conv && conv.messages && conv.messages.length]);

    /* Snapshot pushing moved into ConversationProvider so it runs
       continuously while a conversation exists — not just while the
       chat panel is open. See conversation-store.jsx. */

    /* Operator takeover handoff: when the conversation flips to
       agent_control, collapse voice → chat (so the user can see the
       agent's typed messages) and suspend the realtime session so the
       AI stays silent. Per spec the user doesn't auto-resume voice on
       return-to-AI — they tap the FAB again if they want it back.
       Deliberately keyed on convMode only: this should fire exactly
       once per takeover, reading mode/session as of that render. */
    useEffect(function () {
      if (convMode === 'agent_control') {
        if (mode === 'voice') onSetMode('chat');
        if (session.status !== 'idle') session.stop();
      }
    }, [convMode]);

    /* Mount/fade gate — using DOUBLE requestAnimationFrame so the
       initial render with translateY(100%) actually paints before
       the transition target flips. Without this, React batches the
       mount and the fadeIn flip into the same render and the slide
       animation never engages — the panel just appears. */
    var mountedState = useState(false);
    var mounted = mountedState[0], setMounted = mountedState[1];
    var fadeInState = useState(false);
    var fadeIn = fadeInState[0], setFadeIn = fadeInState[1];

    useEffect(function () {
      if (mode !== 'idle') {
        setMounted(true);
        /* Both rafs are tracked so the cleanup cancels whichever
           is in flight when the effect tears down. */
        var raf2Holder = { id: 0 };
        var raf1 = requestAnimationFrame(function () {
          raf2Holder.id = requestAnimationFrame(function () { setFadeIn(true); });
        });
        return function () {
          cancelAnimationFrame(raf1);
          if (raf2Holder.id) cancelAnimationFrame(raf2Holder.id);
        };
      } else if (mounted) {
        /* Exit path: drop fadeIn now so CSS transitions play, then
           unmount after ANIM_MS (module constant — presumably matched
           to the longest transition duration; confirm if changed). */
        setFadeIn(false);
        var t = setTimeout(function () { setMounted(false); }, ANIM_MS);
        return function () { clearTimeout(t); };
      }
    }, [mode]);

    /* Voice session lifecycle. Starts only when entering voice or
       voiceMini — decision and chat do NOT start the voice session
       (per user spec: voice doesn't auto-start). Keyed on mode only:
       a session that ends on its own while still in voice mode is
       intentionally not restarted here. */
    useEffect(function () {
      if ((mode === 'voice' || mode === 'voiceMini') && session.status === 'idle') {
        session.start();
      }
    }, [mode]);
    /* Teardown is a separate effect keyed on session.status too, so a
       stop is issued even if the session only becomes active after the
       switch to idle. */
    useEffect(function () {
      if (mode === 'idle' && session.status !== 'idle') {
        session.stop();
      }
    }, [mode, session.status]);

    /* Glow/CancelX anchor: panel centre while the panel is open,
       original bottom-centre otherwise (FAB_CENTRE_* are module-level
       constants defined earlier in the file). */
    var anchorBottom = (isVoice || isChat) ? FAB_CENTRE_PANEL_Y : FAB_CENTRE_BOTTOM_Y;
    var glowVisible = !suppressed && (isDecision || isVoice || isVoiceMini) && fadeIn;
    /* In decision: static halo, no pulse. In voice/voiceMini: pulse
       only while transcription is active. */
    var glowPulsing = (isVoice || isVoiceMini) && session.isTranscribing;
    var glowStaticHalo = isDecision || isVoice || isVoiceMini;

    if (!mounted) return null;

    return (
      <React.Fragment>
        <ChatPanel
          visible={isPanelMode}
          fadeIn={fadeIn}
          voiceMode={mode}
          session={session}
          agentControl={convMode === 'agent_control'}
          onScrimTap={function () {
            if (isVoice) onSetMode('voiceMini');
            else onSetMode('idle');
          }}
          onCollapse={function () {
            if (isVoice) onSetMode('voiceMini');
            else onSetMode('idle');
          }}
          onEndChat={function () {
            /* Hard end — resolves the conversation server-side so it
               drops from the dashboard queue and history, stops the
               realtime session, and closes the panel back to idle. */
            if (conv && conv.resolve) conv.resolve();
            if (session.status !== 'idle') session.stop();
            onEnd();
          }}
        />
        {/* Decision-mode dismiss catcher — invisible, captures clicks
           anywhere outside the FAB and pills to bail back to idle.
           Sits below the FAB (z 9200) and pills (z 9100) so taps on
           those still route through. */}
        <div
          onClick={function () { onSetMode('idle'); }}
          style={{
            position: 'absolute',
            inset: 0,
            background: 'transparent',
            pointerEvents: isDecision ? 'auto' : 'none',
            zIndex: 9000,
          }}
        />
        <DecisionTextBar
          visible={!suppressed && isDecision && fadeIn}
          onActivate={function () { onSetMode('chat'); }}
        />
        <VoiceHintBar
          visible={!suppressed && isDecision && fadeIn}
          onActivateVoice={function () { onSetMode('voice'); }}
        />
        <VoiceGlow
          visible={glowVisible}
          anchorBottom={anchorBottom}
          pulsing={glowPulsing}
          staticHalo={glowStaticHalo}
        />
        <CancelX
          /* Only show when the panel is collapsed (voiceMini). When the
             chat pop-up is open in voice mode, the panel's End-chat /
             Close glyphs take over and the floating × would be
             redundant. Also hidden while a modal popup is showing
             (suppressed=true). */
          visible={!suppressed && isVoiceMini && fadeIn}
          anchorBottom={anchorBottom}
          onEnd={onEnd}
        />
      </React.Fragment>
    );
  }

  global.VoiceOverlay = VoiceOverlay;
})(typeof window !== 'undefined' ? window : this);
