;(function () {
  /* ConversationStore — local-only conversation state for the Cloudflare
     deployment. Source of truth is the React state in this provider.
     The Worker exposes ONLY /chat/completion and /chat/greeting (stateless);
     all operator-dashboard / poll / persist endpoints have been stripped.

     Calls preserved for backward API compatibility with voice-overlay,
     voice-tools, and any window.equiti.conversation consumer:
       sessionId, conversationId, messages, mode,
       ensureConversation, appendMessage, escalate, resolve,
       pushClientSnapshot, sendChat, sendGreeting.
  */

  /* Worker origin. window.MCP_BASE (when set and non-empty) overrides the
     local-dev default; `||` is deliberate so an empty string also falls back. */
  const BASE = (typeof window !== 'undefined' && window.MCP_BASE) || 'http://localhost:3001';

  /* sessionId is window-scope only; a fresh value is minted per page load per spec. */
  const sessionId = `s_${Date.now().toString(36)}${Math.random().toString(36).slice(2, 8)}`;

  /* conversationId — generated client-side (base-36 timestamp + random
     suffix); no server round trip. */
  function newConversationId() {
    const stamp = Date.now().toString(36);
    const entropy = Math.random().toString(36).slice(2, 8);
    return `c_${stamp}${entropy}`;
  }

  /* Local message id — same base-36 timestamp + random-suffix scheme as
     conversation ids, with an 'm_' prefix. */
  function newMessageId() {
    const stamp = Date.now().toString(36);
    const entropy = Math.random().toString(36).slice(2, 8);
    return `m_${stamp}${entropy}`;
  }

  /* Context backing useConversation(); default null means consumers rendered
     outside a ConversationProvider receive no store. */
  const Ctx = React.createContext(null);

  function ConversationProvider({ children }) {
    const [conversationId, setConversationId] = React.useState(null);
    const [messages, setMessages] = React.useState([]);
    const [mode, setMode] = React.useState('ai_control');

    /* Lazy create. Idempotent — no server call, just a local id. */
    const ensureConversation = React.useCallback(async function () {
      if (conversationId) return conversationId;
      const id = newConversationId();
      setConversationId(id);
      return id;
    }, [conversationId]);

    /* Append a message to local state. No server POST — the deployed
       Worker has no conversation persistence. Returns the appended
       message so callers can reference its id. */
    const appendMessage = React.useCallback(async function (role, content, meta) {
      if (!content || !content.trim()) return null;
      await ensureConversation();
      const msg = {
        id: newMessageId(),
        role: role,
        content: content,
        timestamp: new Date().toISOString(),
        ...(meta ? { meta } : {}),
      };
      setMessages(function (prev) { return prev.concat([msg]); });
      return msg;
    }, [ensureConversation]);

    /* Stateless typed-chat completion. POSTs {history, content} to the
       Worker, which forwards to OpenAI Chat Completions and returns
       {reply}. We then append both user + assistant locally. */
    const sendChat = React.useCallback(async function (content) {
      if (!content || !content.trim()) return null;
      await ensureConversation();
      /* Snapshot history BEFORE appending the user turn. */
      const history = messages
        .filter(function (m) { return m.role === 'user' || m.role === 'assistant'; })
        .slice(-20)
        .map(function (m) { return { role: m.role, content: m.content }; });
      /* Append user turn immediately so the UI shows it while waiting. */
      const userMsg = {
        id: newMessageId(),
        role: 'user',
        content: content,
        timestamp: new Date().toISOString(),
        meta: { via: 'typed' },
      };
      setMessages(function (prev) { return prev.concat([userMsg]); });

      try {
        const r = await fetch(BASE + '/chat/completion', {
          method: 'POST',
          headers: { 'content-type': 'application/json' },
          body: JSON.stringify({ history: history, content: content }),
        });
        if (!r.ok) return null;
        const data = await r.json();
        const reply = data && data.reply;
        if (!reply) return null;
        const assistantMsg = {
          id: newMessageId(),
          role: 'assistant',
          content: reply,
          timestamp: new Date().toISOString(),
          meta: { via: 'typed' },
        };
        setMessages(function (prev) { return prev.concat([assistantMsg]); });
        return reply;
      } catch (_) {
        return null;
      }
    }, [messages, ensureConversation]);

    /* Stateless greeting. POSTs {} to the Worker, gets {reply}, appends
       assistant locally. Caller (voice-overlay) is responsible for
       idempotency — call once per conversation open. */
    const sendGreeting = React.useCallback(async function () {
      await ensureConversation();
      try {
        const r = await fetch(BASE + '/chat/greeting', {
          method: 'POST',
          headers: { 'content-type': 'application/json' },
          body: JSON.stringify({}),
        });
        if (!r.ok) return null;
        const data = await r.json();
        const reply = data && data.reply;
        if (!reply) return null;
        const msg = {
          id: newMessageId(),
          role: 'assistant',
          content: reply,
          timestamp: new Date().toISOString(),
          meta: { via: 'typed', greeting: true },
        };
        setMessages(function (prev) { return prev.concat([msg]); });
        return reply;
      } catch (_) {
        return null;
      }
    }, [ensureConversation]);

    /* Escalation: no operator dashboard in the cloud deployment, so this
       just flips a local mode flag and appends a system note. Voice-side
       escalate_to_human still works (it's client-dispatched). */
    const escalate = React.useCallback(async function (_reason) {
      setMode('escalated');
    }, []);

    /* Resolve: clears local state and re-issues a fresh conversation id
       on next ensureConversation call. */
    const resolve = React.useCallback(async function () {
      setConversationId(null);
      setMessages([]);
      setMode('ai_control');
    }, []);

    /* Client snapshot push: no server target, so this is a no-op. Kept
       on the API surface so existing snapshot loops upstream don't break. */
    const pushClientSnapshot = React.useCallback(function (_snapshot) {
      /* no-op */
    }, []);

    const value = React.useMemo(function () {
      return {
        sessionId: sessionId,
        conversationId: conversationId,
        messages: messages,
        mode: mode,
        ensureConversation: ensureConversation,
        appendMessage: appendMessage,
        escalate: escalate,
        resolve: resolve,
        pushClientSnapshot: pushClientSnapshot,
        sendChat: sendChat,
        sendGreeting: sendGreeting,
      };
    }, [conversationId, messages, mode, ensureConversation, appendMessage, escalate, resolve, pushClientSnapshot, sendChat, sendGreeting]);

    /* Expose to non-React callers (the realtime client's tool dispatchers,
       voice-overlay decorations) so they can mirror without prop-drilling. */
    React.useEffect(function () {
      window.equiti = window.equiti || {};
      window.equiti.conversation = value;
    }, [value]);

    return <Ctx.Provider value={value}>{children}</Ctx.Provider>;
  }

  /* Hook for React consumers; returns the provider's value object, or
     null when rendered outside a ConversationProvider. */
  function useConversation() {
    const store = React.useContext(Ctx);
    return store;
  }

  /* Script-tag global exports — this bundle has no module system, so the
     provider and hook are attached to window for sibling IIFE files. */
  Object.assign(window, { ConversationProvider, useConversation });
})();
