// ambient_audio.jsx — audio bed for the ASI Workflow Infographic.
//
// Owns both the ambient music loop and the per-scene voiceover clips, all
// declaratively synced to the timeline context exposed by animations.jsx
// (`time`, `playing` from useTimeline()).
//
// Layout:
//   audio/music.mp3          → looped ambient bed, low volume
//   audio/vo_1.mp3 … vo_6.mp3 → scene voiceovers, triggered at scene starts
//
// UI: a single bottom-left pill toggles the whole bed on/off. First click is
// required for autoplay (browsers block unmuted autoplay without user gesture).

// ── Tunables ─────────────────────────────────────────────────────────────
const MUSIC_SRC = 'overview/audio/music.mp3';
const MUSIC_VOLUME = 0.18;   // ambient pad — slightly more present than before
const INTRO_SECONDS = 5.0;   // matches INTRO_SECONDS in main HTML
const VO_VOLUME = 0.95;      // voiceover sits well above the 0.18 music bed

// Must mirror SCENES in the main HTML. Each VO starts 1.5s into its scene so
// the scene has finished fading in before the voice speaks. The number in
// each trailing comment is the scene's start time; start = scene start + 1.5.
// NOTE(review): an optional per-track `fadeIn` (seconds to ramp 0 → VO_VOLUME)
// was documented here, but no track defines it and nothing in this file reads
// it — presumably a planned feature; confirm before relying on it.
const VO_TRACKS = [
  { src: 'overview/audio/vo_1.mp3', start:   6.5 }, // scene 1 (5.0) + 1.5s fade-in
  { src: 'overview/audio/vo_2.mp3', start:  23.7 }, // scene 2 (22.2) + 1.5s fade-in
  { src: 'overview/audio/vo_3.mp3', start:  63.5 }, // scene 3 (62.0) + 1.5s fade-in
  { src: 'overview/audio/vo_4.mp3', start:  92.1 }, // scene 4 (90.6) + 1.5s fade-in
  { src: 'overview/audio/vo_5.mp3', start: 119.3 }, // scene 5 (117.8) + 1.5s fade-in
  { src: 'overview/audio/vo_6.mp3', start: 133.5 }, // scene 6 (132.0) + 1.5s fade-in
];

/**
 * Linearly ramp `audioEl.volume` from its current value to `target` over
 * `ms` milliseconds, driven by requestAnimationFrame ticks.
 *
 * Fixes over the previous version:
 *  - `ms <= 0` used to compute `elapsed / 0` → NaN, leaving `volume = NaN`
 *    (and since `NaN < 1` is false, the rAF loop silently stopped). Now a
 *    non-positive duration is an immediate set.
 *  - `target` is clamped to [0, 1]: HTMLMediaElement.volume throws an
 *    IndexSizeError for out-of-range values.
 *
 * NOTE(review): nothing in this file currently calls fadeGain — presumably
 * kept for the planned per-track `fadeIn` option; confirm before removing.
 *
 * @param {HTMLAudioElement|null} audioEl element to fade (no-op if falsy)
 * @param {number} target desired volume, clamped into [0, 1]
 * @param {number} [ms=300] fade duration in milliseconds
 */
function fadeGain(audioEl, target, ms = 300) {
  if (!audioEl) return;
  const goal = Math.max(0, Math.min(1, target));
  if (ms <= 0) {            // degenerate duration — jump straight to the goal
    audioEl.volume = goal;
    return;
  }
  const start = audioEl.volume;
  const t0 = performance.now();
  const step = () => {
    const k = Math.min(1, (performance.now() - t0) / ms);
    audioEl.volume = start + (goal - start) * k;
    if (k < 1) requestAnimationFrame(step);
  };
  step();
}

/**
 * AmbientAudio — renders the sound-toggle pill plus hidden <audio> elements,
 * and keeps music + voiceover playback in lockstep with the shared timeline
 * (`time` in seconds, `playing` boolean from useTimeline()).
 *
 * All audio state is derived from (enabled, playing, time) every render, so
 * pause / resume / rewind of the timeline keeps the audio consistent without
 * any imperative scheduling.
 */
function AmbientAudio() {
  const { time, playing } = useTimeline();
  // `enabled` is the user-facing master switch; it starts false because
  // browsers block unmuted autoplay until a user gesture.
  const [enabled, setEnabled] = React.useState(false);

  const musicRef = React.useRef(null);
  // One stable ref per VO track (created once; the array identity never changes).
  const voRefs = React.useRef(VO_TRACKS.map(() => React.createRef()));

  // Which VO clip corresponds to the current timeline second.
  // Linear scan over 6 entries: idx = last track whose start <= time
  // (clamps to 0 before the first start and to the last track after the end).
  const currentIdx = React.useMemo(() => {
    let idx = 0;
    for (let i = 0; i < VO_TRACKS.length; i++) {
      if (time >= VO_TRACKS[i].start) idx = i;
    }
    return idx;
  }, [time]);

  // Expose an imperative enable handle so the click-to-start overlay in the
  // main HTML can flip sound on at the same time it begins playback — this
  // is what satisfies the browser's user-gesture requirement for autoplay.
  React.useEffect(() => {
    window.__startAudio = () => setEnabled(true);
    return () => { if (window.__startAudio) delete window.__startAudio; };
  }, []);

  // ── Music: play/pause mirrors enabled+playing ───────────────────────────
  // play() returns a Promise that rejects if autoplay is still blocked;
  // the rejection is intentionally swallowed (the user can click the pill).
  React.useEffect(() => {
    const m = musicRef.current;
    if (!m) return;
    if (enabled && playing) {
      m.play().catch(() => {});
    } else {
      try { m.pause(); } catch (e) {}
    }
  }, [enabled, playing]);

  // ── Music volume: a pure function of timeline time ──────────────────────
  // During the 0 → INTRO_SECONDS intro, volume ramps 0 → MUSIC_VOLUME and
  // stays there afterwards. Uses an ease-out quad curve so the music is
  // audibly present within ~1s (instead of a slow linear crawl from 0) while
  // still reaching full volume in time with the visual fade-in.
  // Tying this to timeline `time` (not wall clock) means rewind/pause/resume
  // keep the music volume in the right place.
  React.useEffect(() => {
    const m = musicRef.current;
    if (!m) return;
    const introK = Math.max(0, Math.min(1, time / INTRO_SECONDS));
    const eased  = 1 - (1 - introK) * (1 - introK); // easeOutQuad
    m.volume = enabled ? MUSIC_VOLUME * eased : 0;
  }, [enabled, time]);

  // ── VO sync — runs every frame so voStart gate + drift reseek re-fire ─
  // Previously this depended only on [enabled, currentIdx, playing], which
  // meant the `time < voStart` gate was evaluated once per scene boundary and
  // never rechecked — so the VO of the current scene never started unless the
  // scene boundary itself crossed. Depending on `time` forces re-evaluation
  // every frame (~60Hz). The body short-circuits when state is already
  // correct, so cost is trivial.
  React.useEffect(() => {
    if (!enabled) {
      // Master switch off: silence every VO clip and bail.
      voRefs.current.forEach(r => { try { r.current?.pause(); } catch(e){} });
      return;
    }

    voRefs.current.forEach((r, i) => {
      const el = r.current;
      if (!el) return;
      if (i === currentIdx) {
        const voStart = VO_TRACKS[i].start;
        if (time < voStart) {
          // Before scheduled start — keep paused at 0 so clip begins cleanly.
          if (!el.paused) { try { el.pause(); } catch(e){} }
          if (el.currentTime !== 0) el.currentTime = 0;
          el.volume = 0;
          return;
        }
        // How far into the clip the timeline says we should be.
        const offset = time - voStart;
        // Past the clip's natural end — keep it paused and silent. Without
        // this guard, calling play() on an `ended` HTMLAudioElement causes
        // the browser to reset currentTime to 0 and replay; combined with
        // per-frame reseeks this produced a screechy, rapid-fire start/stop
        // during the gap between one VO ending and the next beginning.
        // duration is NaN until metadata loads — treat that as "no end known".
        const dur = isFinite(el.duration) ? el.duration : Infinity;
        if (offset >= dur) {
          if (!el.paused) { try { el.pause(); } catch(e){} }
          el.volume = 0;
          return;
        }
        el.volume = VO_VOLUME;
        if (playing) {
          // Reseek only on real drift (>0.4s) so normal playback isn't
          // interrupted by tiny timeline/audio clock disagreements.
          if (Math.abs(el.currentTime - offset) > 0.4) el.currentTime = offset;
          if (el.paused) el.play().catch(() => {});
        } else {
          // Timeline paused: hold the clip paused but scrub it to the
          // timeline position (tighter 0.1s tolerance — no audible seam).
          if (!el.paused) { try { el.pause(); } catch(e){} }
          if (Math.abs(el.currentTime - offset) > 0.1) el.currentTime = offset;
        }
      } else {
        // Not the current scene's clip — parked, rewound, ready for reuse.
        if (!el.paused) { try { el.pause(); } catch(e){} }
        if (el.currentTime !== 0) el.currentTime = 0;
      }
    });
  }, [enabled, currentIdx, playing, time]);

  const toggle = () => setEnabled(v => !v);

  return (
    <div style={{
      position: 'absolute',
      bottom: 24,
      left: 24,
      zIndex: 25,
      display: 'flex',
      alignItems: 'center',
      gap: 10,
      padding: '8px 14px 8px 10px',
      borderRadius: 999,
      background: 'rgba(255,255,255,0.82)',
      backdropFilter: 'blur(4px)',
      boxShadow: '0 1px 3px rgba(0,0,0,0.08)',
      fontFamily: 'Inter, sans-serif',
      fontSize: 14,
      fontWeight: 600,
      letterSpacing: '0.04em',
      textTransform: 'uppercase',
      color: '#4a4f57',
      cursor: 'pointer',
      userSelect: 'none',
    }} onClick={toggle} role="button" aria-label={enabled ? 'Mute sound' : 'Play sound'}>
      <span style={{
        width: 26, height: 26, borderRadius: 999,
        background: enabled ? '#7f35b2' : '#e8eaed',
        color: enabled ? '#fff' : '#4a4f57',
        display: 'inline-flex', alignItems: 'center', justifyContent: 'center',
        transition: 'background 200ms',
      }}>
        {enabled ? (
          /* speaker with sound waves */
          <svg width="12" height="12" viewBox="0 0 16 16" fill="currentColor">
            <path d="M7 2 L3 5.5 H1 V10.5 H3 L7 14 Z"/>
            <path d="M10 5 Q12 8 10 11" fill="none" stroke="currentColor" strokeWidth="1.6" strokeLinecap="round"/>
            <path d="M12 3 Q15 8 12 13" fill="none" stroke="currentColor" strokeWidth="1.6" strokeLinecap="round"/>
          </svg>
        ) : (
          /* speaker with an X (muted) */
          <svg width="12" height="12" viewBox="0 0 16 16" fill="currentColor">
            <path d="M7 2 L3 5.5 H1 V10.5 H3 L7 14 Z"/>
            <path d="M10 6 L14 10 M14 6 L10 10" stroke="currentColor" strokeWidth="1.5" strokeLinecap="round"/>
          </svg>
        )}
      </span>
      <span>{enabled ? 'Sound on' : 'Sound off'}</span>

      {/* Hidden audio elements — managed by the effects above */}
      <audio ref={musicRef} src={MUSIC_SRC} loop preload="auto" style={{display:'none'}}/>
      {VO_TRACKS.map((t, i) => (
        <audio
          key={t.src}
          ref={voRefs.current[i]}
          src={t.src}
          preload="auto"
          style={{display:'none'}}
        />
      ))}
    </div>
  );
}

Object.assign(window, { AmbientAudio });
