// ============ TRACK HELPERS ============
// Camelot-wheel key names used when we must invent a plausible musical key
// for a track that carries no real key metadata (local files, SoundCloud).
const KEYS = ['8A','9A','7A','8B','9B','10A','7B','6A','11A','12A','5A','10B'];
// Placeholder title/artist pairs. Not referenced anywhere in this chunk —
// presumably consumed elsewhere in the file (TODO confirm).
const TITLES = [
  { title: 'Obsidian Rain', artist: 'Unknown' },
  { title: 'Velvet Hour', artist: 'Unknown' },
  { title: 'Chrome Tears', artist: 'Unknown' },
  { title: 'After Hours', artist: 'Unknown' },
  { title: 'Ember Drift', artist: 'Unknown' },
  { title: 'Neon Gospel', artist: 'Unknown' },
];

// Monotonic counter backing makeTrack()'s 'tr<N>' ids.
let __trackIdSeq = 1;

// Camelot key-distance → suggested semitone shift to land slave on master's key.
// Inputs: '8A', '11B' etc. Each Camelot number step around the wheel is a
// perfect fifth (7 semitones) in pitch space; A↔B at the same number is the
// relative-minor/major pair (same key signature, no shift). Returns the
// SHORTEST signed semitone distance (mod 12, range −5..+6), or null when
// either key string fails to parse.
function camelotSemitoneDistance(slaveKey, masterKey) {
  const CAMELOT_RE = /^(\d+)([AB])$/;
  const slaveMatch = CAMELOT_RE.exec(slaveKey || '');
  const masterMatch = CAMELOT_RE.exec(masterKey || '');
  if (!slaveMatch || !masterMatch) return null;
  const slaveNum = Number.parseInt(slaveMatch[1], 10);
  const masterNum = Number.parseInt(masterMatch[1], 10);
  // A/B letters share a chroma (relative minor/major), so only the numeric
  // wheel position matters. Steps around the 12-slot wheel, normalized 0..11:
  const steps = (((masterNum - slaveNum) % 12) + 12) % 12;
  // Each step is a fifth (7 semitones); wrap into one octave, then fold into
  // the shortest signed interval.
  const wrapped = (steps * 7) % 12;
  return wrapped > 6 ? wrapped - 12 : wrapped;
}

// Build a normalized track record for the queue/decks.
// `__origBpm` snapshots the tempo at creation time so pitch logic elsewhere
// can compute playbackRate = bpm / __origBpm. `seed` is a deterministic
// 1..1000 value hashed from id+title, usable for stable pseudo-random UI.
function makeTrack({ title, artist, buffer, bpm = 120, firstBeat = 0, key = '8A', duration }) {
  const id = 'tr' + (__trackIdSeq++);
  let hash = 0;
  for (const ch of id + title) hash = (hash * 31 + ch.charCodeAt(0)) | 0;
  const seed = Math.abs(hash);
  return {
    id,
    title,
    artist,
    buffer,
    bpm,
    __origBpm: bpm,
    firstBeat,
    key,
    // Fall back to the decoded buffer's length when no explicit duration given.
    duration: duration || (buffer?.duration || 0),
    seed: seed % 1000 + 1,
  };
}

// ============ DEMO TRACKS (synthesized in-browser, so they definitely play) ============
// Synthesize a simple loop into a stereo AudioBuffer: a kick on every beat,
// a noise hi-hat on the off-beats, a root-note bass with a slow 4-beat swell,
// and a quiet major-triad pad. `hue` picks the bass root as a semitone offset
// above 55 Hz. Both channels receive identical samples.
function makeDemoBuffer(ctx, seconds, bpm, hue) {
  const sampleRate = ctx.sampleRate;
  const totalSamples = Math.floor(sampleRate * seconds);
  const buffer = ctx.createBuffer(2, totalSamples, sampleRate);
  const beatSec = 60 / bpm;
  // Loop-invariant: bass root frequency from the hue semitone offset.
  const rootFreq = 55 * Math.pow(2, (hue % 12) / 12);
  const TWO_PI = 2 * Math.PI;
  for (let ch = 0; ch < 2; ch++) {
    const data = buffer.getChannelData(ch);
    for (let i = 0; i < totalSamples; i++) {
      const t = i / sampleRate;
      let sample = 0;
      // Kick: short decaying 60 Hz sine at the top of every beat.
      const beatT = t % beatSec;
      if (beatT < 0.15) sample += Math.sin(TWO_PI * 60 * beatT) * Math.exp(-beatT * 18) * 0.9;
      // Hi-hat: burst of decaying noise halfway between beats.
      const hiT = (t + beatSec / 2) % beatSec;
      if (hiT < 0.05) sample += (Math.random() * 2 - 1) * Math.exp(-hiT * 80) * 0.15;
      // Bass: root sine, amplitude swelling over a 4-beat cycle.
      sample += Math.sin(TWO_PI * rootFreq * t) * 0.18 * (0.5 + 0.5 * Math.sin(TWO_PI * t / (beatSec * 4)));
      // Pad: quiet triad two octaves up (ratios 1 : 5/4 : 3/2).
      sample += (Math.sin(TWO_PI * (rootFreq * 4) * t) + Math.sin(TWO_PI * (rootFreq * 4 * 1.25) * t) + Math.sin(TWO_PI * (rootFreq * 4 * 1.5) * t)) * 0.04;
      data[i] = sample;
    }
  }
  return buffer;
}

// Specs for the synthesized demo tracks (see seedDemos): each is rendered by
// makeDemoBuffer and wrapped by makeTrack. `hue` picks the bass root note
// (semitones above 55 Hz); `seconds` is the rendered loop length.
const DEMO_SPECS = [
  { title: 'Lo-Fi Pulse',    artist: 'Demo',    bpm: 88,  key: '8A', seconds: 180, hue: 4 },
  { title: 'Deep House',     artist: 'Demo',    bpm: 122, key: '7A', seconds: 180, hue: 7 },
  { title: 'Synth Dream',    artist: 'Demo',    bpm: 110, key: '9A', seconds: 180, hue: 9 },
  { title: 'Techno Loop',    artist: 'Demo',    bpm: 128, key: '10A', seconds: 180, hue: 2 },
];

// ============ HELP MODAL ============
// First-time guide for non-DJs. Triggered from the bottom-right "?" hint chip.
/**
 * Modal overlay with a beginner-oriented quick-start guide, a glossary, and
 * the keyboard-shortcut list.
 *
 * @param {{ onClose: () => void }} props - `onClose` dismisses the modal. It
 *   fires on backdrop click, the × button, and the "Got it" button; clicks
 *   inside the panel call stopPropagation so they don't reach the backdrop.
 */
function HelpModal({ onClose }) {
  return (
    <div className="help-backdrop" onClick={onClose}>
      <div className="help-modal" onClick={(e) => e.stopPropagation()}>
        <button className="help-close" onClick={onClose} aria-label="Close help">×</button>
        <div className="help-title">Quick start</div>
        <div className="help-sub">Brand new to mixing? Read this. Takes 30 seconds.</div>

        {/* Numbered quick-start steps */}
        <div className="help-section">
          <div className="help-step">
            <div className="help-num">1</div>
            <div>
              <div className="help-step-title">Add some music</div>
              <div className="help-step-body">Paste a YouTube or SoundCloud link up top, or drop your own MP3/WAV files anywhere on the page. Tap <b>DEMOS</b> if you just want to try it.</div>
            </div>
          </div>
          <div className="help-step">
            <div className="help-num">2</div>
            <div>
              <div className="help-step-title">Easiest path: Automix</div>
              <div className="help-step-body">Hit <b>START AUTOMIX</b> in the right panel. The app will pick songs from your queue and blend them together — you don't have to do anything else.</div>
            </div>
          </div>
          <div className="help-step">
            <div className="help-num">3</div>
            <div>
              <div className="help-step-title">Or mix manually</div>
              <div className="help-step-body">Drag a track to <b>Deck A</b> (left) and another to <b>Deck B</b> (right). Hit <b>PLAY</b> on both, then click <b>MATCH</b> on one deck so the beats line up. Slide the big horizontal bar (the crossfader) at the bottom of the mixer to blend between them.</div>
            </div>
          </div>
          <div className="help-step">
            <div className="help-num">4</div>
            <div>
              <div className="help-step-title">Spice it up (optional)</div>
              <div className="help-step-body">Drag inside the <b>X/Y PAD</b> on each deck to sweep effects. Hit a <b>LOOP</b> pad (1, 4, 8) to repeat a section. Use <b>EQ</b> knobs to swap bass / mids / highs between songs — the classic DJ trick for clean transitions.</div>
            </div>
          </div>
        </div>

        {/* Glossary of DJ terms used across the UI */}
        <div className="help-section">
          <div className="help-h2">Words you'll see</div>
          <div className="help-glossary">
            <div><b>BPM</b> — beats per minute. How fast the song is.</div>
            <div><b>Key</b> — the musical key. Songs in the same or compatible keys sound good together.</div>
            <div><b>Match</b> — line up the tempos of two songs so the beats hit together.</div>
            <div><b>Crossfader</b> — the long slider at the bottom of the mixer that swaps between Deck A and Deck B.</div>
            <div><b>Cue</b> — a saved spot in the song. <b>CUE</b> jumps you back to it instantly.</div>
            <div><b>Quantize (Q)</b> — when ON, your loops and jumps automatically snap to the beat. Recommended for beginners.</div>
            <div><b>Master</b> — the deck whose beat the other one syncs to.</div>
          </div>
        </div>

        {/* Keyboard shortcuts (display only — handlers live elsewhere in the file) */}
        <div className="help-section">
          <div className="help-h2">Keyboard shortcuts</div>
          <div className="help-keys">
            <div><kbd>Q</kbd> Play / pause Deck A</div>
            <div><kbd>P</kbd> Play / pause Deck B</div>
            <div><kbd>W</kbd> Set cue point on Deck A</div>
            <div><kbd>O</kbd> Set cue point on Deck B</div>
            <div><kbd>Space</kbd> Start / stop Automix</div>
          </div>
        </div>

        <button className="help-done" onClick={onClose}>Got it — let's mix</button>
      </div>
    </div>
  );
}

// ============ APP ============
function App() {
  const [queue, setQueue] = useState([]);   // all loaded + still-loading tracks
  const [deckA, setDeckA] = useState(emptyDeck());
  const [deckB, setDeckB] = useState(emptyDeck());
  // Fader values are 0..1. Crossfader 0 = full A, 1 = full B (equal-power
  // cos/sin law — see the SoundCloud volume effect below).
  const [mixer, setMixer] = useState({ volA: 0.75, volB: 0.75, crossfader: 0.5, master: 0.8 });
  // Automix: when on, auto-loads queue tracks one after the other and crossfades.
  //   on        — master toggle
  //   fadeTime  — seconds the crossfader takes to swing
  //   playedIds — tracks already aired, so we don't repeat
  //   nextDeck  — which deck the next queued track will land on
  //   phase     — 'idle' | 'starting' | 'playing' | 'fading' | 'done' (for UI)
  //   fadeStart — timestamp when current fade began (for countdown UI)
  const [automix, setAutomix] = useState({ on: false, fadeTime: 8, playedIds: [], phase: 'idle', fadeStart: 0 });
  const [recording, setRecording] = useState(false);   // recording flag (consumer below this chunk)
  const [toast, setToast] = useState(null);            // transient status message (see flashToast)
  const [helpOpen, setHelpOpen] = useState(false);     // HelpModal visibility
  const [audioStarted, setAudioStarted] = useState(false); // audio-start flag (consumer below this chunk)
  const [loadingDemos, setLoadingDemos] = useState(false); // true while seedDemos runs
  const [sideTab, setSideTab] = useState('queue'); // 'queue' | 'spotify'
  // Explicit sync master. null = auto (first to play wins). 'a' | 'b' = user-pinned.
  const [masterDeck, setMasterDeck] = useState(null);
  // Global quantize: when on, cues/loops/jumps snap to the nearest beat-division.
  // division: 1 = whole bar (4 beats), 0.5 = half (2 beats), 0.25 = beat,
  // 0.125 = 1/8, 0.0625 = 1/16. Applied to beat-jump and loops.
  const [quantize, setQuantize] = useState({ on: true, division: 0.25 });
  // Live phase-error readouts (ms) for each deck — updated by drift watcher.
  const [phaseErr, setPhaseErr] = useState({ a: 0, b: 0 });

  // Fresh per-deck state: no track, transport stopped, flat EQ/filter, every
  // effect off. `keylock` defaults ON; delay defaults to beat-synced quarter
  // notes even while the effect itself is off.
  function emptyDeck() {
    return {
      track: null,
      playing: false,
      progress: 0,
      cue: null,
      synced: false,
      keySync: false,
      loop: null,
      keylock: true,
      eq: { high: 0.5, mid: 0.5, low: 0.5 },
      filter: 0.5,
      // Per-effect on/off toggles plus wet/strength amounts (0..1).
      fx: {
        reverb: { on: false, amount: 0.4 },
        delay: { on: false, amount: 0.3, beatSync: true, beatDivision: 0.25 },
        echo: { on: false, amount: 0.3 },
        flanger: { on: false, amount: 0.3 },
        crush: { on: false, amount: 0.3 },
      },
    };
  }

  // Show a toast for ~1.8s. NOTE(review): overlapping calls don't cancel the
  // earlier timeout, so a rapid second toast can be cleared early — confirm
  // that's acceptable.
  const flashToast = (msg) => { setToast(msg); setTimeout(() => setToast(null), 1800); };

  // Apply a patch (plain object, or track => patch function) to EVERY copy of
  // a track: its queue row plus whichever deck(s) currently hold it. Tracks
  // are duplicated across those three states, so this is what keeps
  // background-analysis updates visible on an already-loaded deck.
  const patchTrackEverywhere = (trackId, patchOrFn) => {
    if (!trackId) return;
    const apply = (track) => {
      const patch = (typeof patchOrFn === 'function') ? patchOrFn(track) : patchOrFn;
      return { ...track, ...patch };
    };
    setQueue(q => q.map(tr => tr.id === trackId ? apply(tr) : tr));
    setDeckA(d => d.track?.id === trackId ? { ...d, track: apply(d.track) } : d);
    setDeckB(d => d.track?.id === trackId ? { ...d, track: apply(d.track) } : d);
  };

  // Write analyzer results onto a track everywhere it lives. A null/undefined
  // analysis only clears the `analyzing` flag, leaving prior metadata intact.
  const applyAnalysisToTrack = (trackId, analysis) => {
    if (!analysis) {
      patchTrackEverywhere(trackId, { analyzing: false });
      return;
    }
    patchTrackEverywhere(trackId, {
      bpm: analysis.bpm,
      __origBpm: analysis.bpm,       // reset the pitch baseline to the detected tempo
      firstBeat: analysis.firstBeat,
      beatGrid: analysis.beatGrid,
      analyzeConfidence: analysis.confidence,
      analyzing: false,
      analyzeProgress: 1,
    });
  };

  // Run TrackAnalyzer over a decoded buffer and patch the results onto the
  // track, streaming progress (0..1) into the UI along the way.
  // Returns the analysis object, or null when there is no buffer or the
  // analyzer throws. Failure is best-effort by design: the track keeps its
  // placeholder BPM and simply stops showing as "analyzing".
  const analyzeAndPatchTrack = async (trackId, buffer) => {
    if (!buffer) {
      patchTrackEverywhere(trackId, { analyzing: false });
      return null;
    }
    try {
      patchTrackEverywhere(trackId, { analyzing: true, analyzeProgress: 0 });
      const analysis = await TrackAnalyzer.analyzeTrack(buffer, trackId, (p) => {
        patchTrackEverywhere(trackId, { analyzeProgress: p });
      });
      applyAnalysisToTrack(trackId, analysis);
      return analysis;
    } catch (e) {
      // Deliberate swallow: an analysis error must not break playback.
      patchTrackEverywhere(trackId, { analyzing: false });
      return null;
    }
  };

  // A track's analysis is "trusted" (sync-grade) when it has a truthy BPM, a
  // numeric firstBeat, and at least one of: a beat grid of ≥8 points, a tempo
  // map of ≥2 points, or detector confidence ≥ 0.55.
  const hasTrustedAnalysis = (track) => {
    if (!track?.bpm) return false;
    if (!Number.isFinite(Number(track.firstBeat))) return false;
    const gridOk = Array.isArray(track.beatGrid) && track.beatGrid.length >= 8;
    const tempoMapOk = Array.isArray(track.tempoMap) && track.tempoMap.length >= 2;
    const confidence = Number(track.analyzeConfidence);
    const confidenceOk = Number.isFinite(confidence) && confidence >= 0.55;
    return gridOk || tempoMapOk || confidenceOk;
  };

  // Extract a fetched track's analysis fields into a patch object: firstBeat
  // coerced to a number (default 0), array fields passed through only when
  // they are real arrays, confidence only when numeric. Always marks the
  // track as done analyzing.
  const analysisPatchFromTrack = (track) => {
    const firstBeatNum = Number(track.firstBeat);
    const confidenceNum = Number(track.analyzeConfidence);
    return {
      firstBeat: Number.isFinite(firstBeatNum) ? firstBeatNum : 0,
      beatGrid: Array.isArray(track.beatGrid) ? track.beatGrid : undefined,
      tempoMap: Array.isArray(track.tempoMap) ? track.tempoMap : undefined,
      analyzeConfidence: Number.isFinite(confidenceNum) ? confidenceNum : undefined,
      analyzing: false,
      analyzeProgress: 1,
    };
  };

  // ------ Seed demo tracks on audio start ------
  // Render every DEMO_SPECS entry into a real AudioBuffer (makeDemoBuffer) and
  // REPLACE the queue with the demo set. This runs synchronously, so the
  // loadingDemos flag flips on and back off within the same tick.
  const seedDemos = () => {
    AudioEngine.ensureCtx();
    setLoadingDemos(true);
    const engineCtx = AudioEngine.ctx;
    const tracks = DEMO_SPECS.map(spec => {
      const buf = makeDemoBuffer(engineCtx, spec.seconds, spec.bpm, spec.hue);
      return makeTrack({ ...spec, buffer: buf });
    });
    setQueue(tracks);   // note: replaces the queue rather than appending
    setLoadingDemos(false);
    flashToast(`Loaded ${tracks.length} demo tracks`);
  };

  // ------ Add files AND auto-load first one to a specific deck ------
  // Used by deck drop targets: drop an mp3 on Deck A → it appears in queue
  // and immediately loads on A. Subsequent files in the same drop just queue.
  // Files are decoded and analyzed strictly in sequence (each iteration awaits).
  const addFilesToDeck = async (side, fileList) => {
    const files = [...fileList];
    if (!files.length) return;
    let loadedTarget = false;
    for (let i = 0; i < files.length; i++) {
      const file = files[i];
      try {
        const buf = await AudioEngine.loadFile(file);
        const name = file.name.replace(/\.[^.]+$/, '');   // strip extension
        const placeholder = makeTrack({
          title: name, artist: 'Local file', buffer: buf,
          bpm: 120, firstBeat: 0, duration: buf.duration,
          key: KEYS[Math.floor(Math.random() * KEYS.length)],  // local files carry no key metadata
        });
        placeholder.analyzing = true;
        placeholder.analyzeProgress = 0;
        setQueue(q => [...q, placeholder]);
        // First file of a deck-drop loads onto that deck immediately so the
        // user gets visual feedback. Analysis continues in the background and
        // patches both queue + deck via the deck-change persistence path.
        if (!loadedTarget && side) {
          loadedTarget = true;
          loadTo(side, placeholder);
        }
        await analyzeAndPatchTrack(placeholder.id, buf);
      } catch {
        flashToast(`Failed: ${file.name}`);
      }
    }
    // NOTE(review): this summary counts every dropped file, including failures.
    flashToast(`Added ${files.length} track${files.length > 1 ? 's' : ''}`);
  };

  // ------ File upload ------
  // Queue-only variant of addFilesToDeck: decode each file sequentially, show
  // a placeholder row immediately, then enrich it with real analysis.
  const addFiles = async (fileList) => {
    const files = [...fileList];
    for (const file of files) {
      try {
        const buf = await AudioEngine.loadFile(file);
        const name = file.name.replace(/\.[^.]+$/, '');
        // Insert a placeholder so the user sees the track immediately,
        // then enrich with real analysis when it lands.
        const placeholder = makeTrack({
          title: name,
          artist: 'Local file',
          buffer: buf,
          bpm: 120,
          firstBeat: 0,
          duration: buf.duration,
          key: KEYS[Math.floor(Math.random() * KEYS.length)],
        });
        placeholder.analyzing = true;
        placeholder.analyzeProgress = 0;
        setQueue(q => [...q, placeholder]);

        // Run TrackAnalyzer (spectral-flux beat detection + IndexedDB cache).
        // This is non-blocking: progress streams via setQueue updates.
        await analyzeAndPatchTrack(placeholder.id, buf);
      } catch (e) {
        flashToast(`Failed: ${file.name}`);
      }
    }
    // NOTE(review): counts every file, including any that failed above.
    flashToast(`Added ${files.length} track${files.length > 1 ? 's' : ''}`);
  };

  // Legacy entry point still wired to the TopBar URL box.
  const addTrack = (url) => {
    // Kept for compatibility with TopBar; now shows a hint.
    flashToast('Drop MP3/WAV files or click the upload button →');
  };

  // ------ YouTube URL / playlist fetch ------
  // Routes a pasted URL: SoundCloud links get Widget-API metadata only (audio
  // stays in the widget iframe); YouTube links go through the backend, either
  // as a whole playlist or a single video. Placeholders are inserted first so
  // the queue shows progress, then patched (or removed on failure).
  const handleYoutubeUrl = async (url) => {
    AudioEngine.ensureCtx();

    // --- SoundCloud: real in-browser playback via Widget API ---
    if (SoundCloudAdapter.isSoundCloudUrl(url)) {
      const placeholder = makeTrack({
        title: 'SoundCloud · loading…',
        artist: 'SoundCloud',
        bpm: 0, key: '—', duration: 0,
      });
      placeholder.loading = true;
      placeholder.progress = 0;
      placeholder.statusText = 'Fetching SoundCloud metadata';
      placeholder.source = 'soundcloud';
      setQueue(q => [...q, placeholder]);
      try {
        const meta = await SoundCloudAdapter.loadTrackMeta(url);
        // Fake BPM/key from URL hash (honest: SoundCloud doesn't expose these)
        const hash = Math.abs([...url].reduce((a, c) => (a * 31 + c.charCodeAt(0)) | 0, 0));
        const bpm = 95 + (hash % 45);         // deterministic 95..139
        const key = KEYS[hash % KEYS.length];
        setQueue(q => q.map(t => t.id === placeholder.id
          ? { ...t, loading: false, progress: 1,
              title: meta.title, artist: meta.artist, thumb: meta.thumb,
              bpm, firstBeat: 0, key, source: 'soundcloud', scUrl: url, duration: 0 }
          : t));
        flashToast(`SoundCloud: ${meta.title}`);
      } catch (e) {
        // Drop the placeholder — a dead row in the queue isn't loadable.
        setQueue(q => q.filter(t => t.id !== placeholder.id));
        flashToast(`SoundCloud failed: ${e.message || 'error'}`);
      }
      return;
    }

    // --- YouTube: via backend (mock until deployed) ---
    // A playlist id with no single-video id means "fetch the whole playlist".
    const isPlaylist = !!BackendClient.playlistIdFromUrl(url) && !BackendClient.ytIdFromUrl(url);
    if (isPlaylist) {
      const placeholders = [];
      await BackendClient.fetchPlaylist(
        url,
        // Entry discovered → insert a loading placeholder row for it.
        ({ index, total, ytId, title, artist }) => {
          const ph = makeTrack({
            title: title || `Playlist Track ${index + 1}`,
            artist: artist || 'Queued',
            bpm: 0, key: '—', duration: 0,
          });
          ph.loading = true;
          ph.progress = 0;
          ph.statusText = `Track ${index + 1}/${total}`;
          placeholders[index] = ph;
          setQueue(q => [...q, ph]);
        },
        // Per-entry download progress.
        ({ index, progress }) => {
          const ph = placeholders[index];
          if (!ph) return;
          setQueue(q => q.map(t => t.id === ph.id ? { ...t, progress } : t));
        },
        // Entry finished → swap placeholder fields for the real track data.
        ({ index, track }) => {
          const ph = placeholders[index];
          if (!ph) return;
          const trusted = hasTrustedAnalysis(track);
          setQueue(q => q.map(t => t.id === ph.id
            ? { ...t, loading: false, progress: 1, buffer: track.buffer,
                bpm: track.bpm, __origBpm: track.bpm, key: track.key, duration: track.duration,
                title: track.title, artist: track.artist, thumb: track.thumb,
                ...analysisPatchFromTrack(track),
                analyzing: !trusted, analyzeProgress: trusted ? 1 : 0 }
            : t));
          // Untrusted backend metadata → run our own analysis (fire-and-forget).
          if (!trusted) analyzeAndPatchTrack(ph.id, track.buffer);
        },
      );
      flashToast(`Playlist added`);
    } else {
      const ytId = BackendClient.ytIdFromUrl(url) || ('yt' + Date.now());
      const placeholder = makeTrack({
        title: `YouTube · ${ytId.slice(0, 6)}`,
        artist: 'Downloading…',
        bpm: 0, key: '—', duration: 0,
      });
      placeholder.loading = true;
      placeholder.progress = 0;
      placeholder.statusText = 'Fetching audio';
      placeholder.source = 'youtube';
      setQueue(q => [...q, placeholder]);
      try {
        const track = await BackendClient.fetchTrack(url, (p) => {
          setQueue(q => q.map(t => t.id === placeholder.id ? { ...t, progress: p } : t));
        });
        const trusted = hasTrustedAnalysis(track);
        setQueue(q => q.map(t => t.id === placeholder.id
          ? { ...t, loading: false, progress: 1, buffer: track.buffer,
              bpm: track.bpm, __origBpm: track.bpm, key: track.key, duration: track.duration,
              title: track.title, artist: track.artist, thumb: track.thumb, source: 'youtube',
              ...analysisPatchFromTrack(track),
              analyzing: !trusted, analyzeProgress: trusted ? 1 : 0 }
          : t));
        flashToast(`Added: ${track.title}`);
        // Refine BPM + firstBeat with our spectral-flux analyzer.
        // Backend's value is metadata-derived; ours is content-derived and
        // generally more accurate for sync-grade timing.
        if (!trusted) await analyzeAndPatchTrack(placeholder.id, track.buffer);
      } catch (e) {
        setQueue(q => q.filter(t => t.id !== placeholder.id));
        flashToast(`Failed: ${e.message || 'fetch error'}`);
      }
    }
  };

  // ------ Spotify handoff: enqueue YouTube audio + use Spotify metadata ------
  // `spTrack` carries Spotify metadata plus a matched YouTube URL. We fetch
  // the YouTube audio through the backend but keep Spotify's title/artist/key
  // for display; the fetched audio owns the beat grid for sync.
  const handleSpotifyHandoff = async (spTrack) => {
    AudioEngine.ensureCtx();
    const url = spTrack.url;
    // NOTE(review): ytId is computed but not referenced below — confirm it's needed.
    const ytId = spTrack.ytId || BackendClient.ytIdFromUrl(url) || ('yt' + Date.now());
    const placeholder = makeTrack({
      title: spTrack.title,
      artist: spTrack.artist,
      bpm: spTrack.bpm || 0,
      key: spTrack.key || '—',
      duration: spTrack.duration || 0,
    });
    placeholder.loading = true;
    placeholder.progress = 0;
    placeholder.statusText = 'Spotify → YouTube';
    placeholder.source = 'spotify';
    placeholder.thumb = spTrack.thumb;
    placeholder.spotifyId = spTrack.spotifyId;
    placeholder.ytTitle = spTrack.ytTitle;
    setQueue(q => [...q, placeholder]);
    try {
      // Stream download progress into the queue row.
      const track = await BackendClient.fetchTrack(url, (p) => {
        setQueue(q => q.map(t => t.id === placeholder.id ? { ...t, progress: p } : t));
      });
      const trusted = hasTrustedAnalysis(track);
      setQueue(q => q.map(t => t.id === placeholder.id
        ? {
            ...t,
            loading: false, progress: 1, buffer: track.buffer,
            // For sync, prefer analysis of the actual fetched audio. Spotify
            // metadata remains useful, but the YouTube handoff audio must own
            // the beat grid.
            bpm: track.bpm || spTrack.bpm,
            __origBpm: track.bpm || spTrack.bpm,
            key: spTrack.key || track.key,
            duration: spTrack.duration || track.duration,
            title: spTrack.title,
            artist: spTrack.artist,
            thumb: spTrack.thumb || track.thumb,
            source: 'spotify',
            ...analysisPatchFromTrack(track),
            analyzing: !trusted,
            analyzeProgress: trusted ? 1 : 0,
          }
        : t));
      flashToast(`Added: ${spTrack.title}`);
      // Content-derived analysis replaces untrusted backend metadata.
      if (!trusted) await analyzeAndPatchTrack(placeholder.id, track.buffer);
    } catch (e) {
      setQueue(q => q.filter(t => t.id !== placeholder.id));
      flashToast(`Spotify handoff failed: ${e.message || 'fetch error'}`);
    }
  };

  // ------ Effective volumes ------
  // Derived per-deck output level: master × channel fader (pre-crossfader).
  const effA = useMemo(() => mixer.master * mixer.volA, [mixer]);
  const effB = useMemo(() => mixer.master * mixer.volB, [mixer]);

  // ------ Push mixer/deck changes to AudioEngine + SoundCloudAdapter ------
  useEffect(() => {
    AudioEngine.setChannelVolume('a', mixer.volA);
    AudioEngine.setChannelVolume('b', mixer.volB);
    AudioEngine.setMasterVolume(mixer.master);
    AudioEngine.setCrossfader(mixer.crossfader);
    // SC: compute effective per-side volume (equal-power)
    // SoundCloud audio lives in the widget iframe, outside our audio graph,
    // so the entire fader chain collapses into one scalar volume per side.
    const xa = Math.cos((mixer.crossfader * Math.PI) / 2);
    const xb = Math.sin((mixer.crossfader * Math.PI) / 2);
    if (deckA.track?.source === 'soundcloud') SoundCloudAdapter.setVolume('a', mixer.master * mixer.volA * xa);
    if (deckB.track?.source === 'soundcloud') SoundCloudAdapter.setVolume('b', mixer.master * mixer.volB * xb);
  }, [mixer, deckA.track?.id, deckB.track?.id]);

  // Per-deck EQ / filter / FX values are pushed to the engine as they change.
  useEffect(() => { AudioEngine.setEQ('a', deckA.eq); }, [deckA.eq.low, deckA.eq.mid, deckA.eq.high]);
  useEffect(() => { AudioEngine.setFilter('a', deckA.filter); }, [deckA.filter]);
  useEffect(() => {
    Object.entries(deckA.fx).forEach(([fx, v]) => AudioEngine.setFX('a', fx, v));
  }, [deckA.fx]);
  useEffect(() => { AudioEngine.setEQ('b', deckB.eq); }, [deckB.eq.low, deckB.eq.mid, deckB.eq.high]);
  useEffect(() => { AudioEngine.setFilter('b', deckB.filter); }, [deckB.filter]);
  useEffect(() => {
    Object.entries(deckB.fx).forEach(([fx, v]) => AudioEngine.setFX('b', fx, v));
  }, [deckB.fx]);

  // ------ BPM-synced FX: keep delay's delayTime locked to a beat division ------
  // When `delay.beatSync` is on, the delay is exactly N beats of the deck's
  // current effective BPM (matches sync-engaged BPM, not the original).
  // Default division is 1/4 note (one beat). User can override per-deck via
  // delay.beatDivision: 0.0625=1/16, 0.125=1/8, 0.25=1/4, 0.5=1/2, 1=bar.
  useEffect(() => {
    const apply = (side, deck) => {
      // SoundCloud decks are skipped — their audio never enters our FX chain.
      if (!deck.track || deck.track.source === 'soundcloud') return;
      const dx = deck.fx.delay;
      if (!dx?.beatSync) return;
      const div = dx.beatDivision || 0.25;
      AudioEngine.setDelayBeatDivision(side, div, deck.track.bpm || 120);
    };
    apply('a', deckA);
    apply('b', deckB);
  }, [
    deckA.fx.delay?.beatSync, deckA.fx.delay?.beatDivision,
    deckB.fx.delay?.beatSync, deckB.fx.delay?.beatDivision,
    deckA.track?.bpm, deckB.track?.bpm,
  ]);

  // ------ Sync playback rate for tempo-matching ------
  // SYNC is exclusive: only the "slave" deck's rate changes; the master stays at 1x.
  // ------ Sync + manual BPM → playbackRate ------
  // playbackRate = targetBpm / originalBpm where:
  //   - targetBpm = other deck's bpm (if synced) else this deck's bpm
  //   - originalBpm = the detected BPM at load time (track.__origBpm, falls back to bpm)
  // This means: if user manually changes bpm via +/− or trim fader, the actual
  // playback speed changes to match — just like a real CDJ pitch fader.
  // SoundCloud tracks can't rate-shift (iframe sandbox) — metadata-only there.
  useEffect(() => {
    const aSc = deckA.track?.source === 'soundcloud';
    const bSc = deckB.track?.source === 'soundcloud';

    const rateFor = (self, other, synced) => {
      if (!self.track) return 1;
      const orig = self.track.__origBpm ?? self.track.bpm;
      const target = (synced && other.track) ? other.track.bpm : self.track.bpm;
      if (!orig || !target) return 1;
      let rate = target / orig;
      // Key-sync: when keySync is on AND keylock is OFF, we apply a small
      // additional rate adjustment so the slave's pitch lands on the master's
      // root. With keylock ON the engine pitch-shifts post-rate, so a key-sync
      // semitone offset there would cancel; we just skip in that case.
      // Camelot-distance heuristic: treat each Camelot-number step as roughly
      // ±7 semitones (perfect fifth) — too large for full compensation, so we
      // bound the additional shift to ±2 semitones (≈12% rate).
      if (synced && self.keySync && !self.keylock && other.track) {
        const semitoneShift = camelotSemitoneDistance(self.track.key, other.track.key);
        if (semitoneShift != null) {
          const bounded = Math.max(-2, Math.min(2, semitoneShift));
          rate *= Math.pow(2, bounded / 12);
        }
      }
      return rate;
    };

    // "Synced AND the other deck isn't" enforces the exclusivity above: if
    // both decks flag synced, neither one slaves.
    if (!aSc && deckA.track) {
      AudioEngine.setPlaybackRate('a', rateFor(deckA, deckB, deckA.synced && !deckB.synced), deckA.keylock);
    }
    if (!bSc && deckB.track) {
      AudioEngine.setPlaybackRate('b', rateFor(deckB, deckA, deckB.synced && !deckA.synced), deckB.keylock);
    }
  }, [
    deckA.synced, deckB.synced,
    deckA.track?.bpm, deckB.track?.bpm,
    deckA.track?.__origBpm, deckB.track?.__origBpm,
    deckA.track?.source, deckB.track?.source,
    deckA.track?.id, deckB.track?.id,
    deckA.track?.key, deckB.track?.key,
    deckA.keylock, deckB.keylock,
    deckA.keySync, deckB.keySync,
  ]);

  // ------ Play/pause sync ------
  // Mirror each deck's `playing` flag into its backend: SoundCloud tracks
  // drive the widget, everything else drives AudioEngine. Re-runs on track
  // change so a freshly-loaded track honors the current flag.
  useEffect(() => {
    if (deckA.track?.source === 'soundcloud') {
      if (deckA.playing) SoundCloudAdapter.play('a'); else SoundCloudAdapter.pause('a');
    } else {
      if (deckA.playing) AudioEngine.play('a'); else AudioEngine.pause('a');
    }
  }, [deckA.playing, deckA.track?.id]);
  useEffect(() => {
    if (deckB.track?.source === 'soundcloud') {
      if (deckB.playing) SoundCloudAdapter.play('b'); else SoundCloudAdapter.pause('b');
    } else {
      if (deckB.playing) AudioEngine.play('b'); else AudioEngine.pause('b');
    }
  }, [deckB.playing, deckB.track?.id]);

  // ------ Effective sync master ------
  // If user pinned a master via the MASTER button, use it. Otherwise auto-pick:
  //   1. If exactly one deck is playing → that deck.
  //   2. If both playing → whichever has been playing longer (heuristic: lower progress
  //      means it was just dropped, so the OTHER one — the one already running — is master).
  //      Concretely: with both playing, the deck with the HIGHER progress wins.
  //   3. Else null.
  const effectiveMaster = useMemo(() => {
    if (masterDeck === 'a' && deckA.track) return 'a';
    if (masterDeck === 'b' && deckB.track) return 'b';
    const aOn = deckA.track && deckA.playing;
    const bOn = deckB.track && deckB.playing;
    if (aOn && !bOn) return 'a';
    if (bOn && !aOn) return 'b';
    if (aOn && bOn) return (deckA.progress >= deckB.progress) ? 'a' : 'b';
    // Nothing playing: fall back to any loaded deck so the UI can show a master.
    if (deckA.track) return 'a';
    if (deckB.track) return 'b';
    return null;
  }, [masterDeck, deckA.track?.id, deckB.track?.id, deckA.playing, deckB.playing, deckA.progress, deckB.progress]);

  // ------ Phase-align slave at the moment SYNC engages (or its track loads) ------
  // Without this, sync just matches tempo — kicks land off-beat. With it,
  // hitting SYNC + PLAY drops you straight into a beat-locked mix.
  useEffect(() => {
    const slaveSide = deckA.synced ? 'a' : (deckB.synced ? 'b' : null);
    if (!slaveSide) return;
    const masterSide = slaveSide === 'a' ? 'b' : 'a';
    const slave = slaveSide === 'a' ? deckA : deckB;
    const master = slaveSide === 'a' ? deckB : deckA;
    // Bail until both sides have engine-playable, fully analyzed tracks.
    if (!slave.track || !master.track) return;
    if (slave.track.source === 'soundcloud' || master.track.source === 'soundcloud') return;
    if (slave.track.analyzing || master.track.analyzing || !slave.track.bpm || !master.track.bpm) return;
    // Compute master's current beat phase, then snap slave to the nearest beat.
    // Keep this deliberately simple: tempo lock first, one phase snap second.
    // Continuous correction below is soft-only and never hard-seeks.
    const mPhase = AudioEngine.getBeatPhase(masterSide, master.track.firstBeat || 0, master.track.bpm);
    const slaveOrig = slave.track.__origBpm ?? slave.track.bpm;
    const baseRate = slaveOrig ? master.track.bpm / slaveOrig : 1;
    AudioEngine.phaseAlign(slaveSide, mPhase, slave.track.firstBeat || 0, slave.track.bpm, baseRate);
  }, [
    deckA.synced, deckB.synced,
    deckA.track?.id, deckB.track?.id,
    deckA.track?.analyzing, deckB.track?.analyzing,
    deckA.track?.bpm, deckB.track?.bpm,
    deckA.track?.firstBeat, deckB.track?.firstBeat,
    // Re-align if the slave was just played from pause (phase resets relative to master).
    // (Computed booleans as deps are intentional: they flip exactly when a
    // synced deck starts or stops playing.)
    deckA.synced && deckA.playing, deckB.synced && deckB.playing,
  ]);

  // ------ 10Hz drift watcher: tiny continuous correction while both decks play ------
  useEffect(() => {
    const slaveSide = deckA.synced ? 'a' : (deckB.synced ? 'b' : null);
    if (!slaveSide) { setPhaseErr({ a: 0, b: 0 }); return; }
    const masterSide = slaveSide === 'a' ? 'b' : 'a';
    const slave = slaveSide === 'a' ? deckA : deckB;
    const master = slaveSide === 'a' ? deckB : deckA;
    // NOTE(review): these early returns leave phaseErr at its last value, so
    // the readout can go stale while a deck is paused — confirm that's intended.
    if (!slave.playing || !master.playing) return;
    if (!slave.track || !master.track) return;
    if (slave.track.source === 'soundcloud' || master.track.source === 'soundcloud') return;
    if (slave.track.analyzing || master.track.analyzing || !slave.track.bpm || !master.track.bpm) return;
    // Capture stable values up front so the closure doesn't keep reading
    // from the (possibly stale) component scope.
    const slaveBpm = slave.track.bpm;
    const slaveFirstBeat = slave.track.firstBeat || 0;
    const slaveOrigBpm = slave.track.__origBpm ?? slaveBpm;
    const masterBpm = master.track.bpm;
    const masterFirstBeat = master.track.firstBeat || 0;
    const baseRate = slaveOrigBpm ? masterBpm / slaveOrigBpm : 1;
    // Every 100ms: measure the phase error, publish it for the UI, and apply
    // a soft nudge (hard seeking is the one-shot phaseAlign's job, not ours).
    const h = setInterval(() => {
      const mPhase = AudioEngine.getBeatPhase(masterSide, masterFirstBeat, masterBpm);
      const errMs = AudioEngine.getPhaseErrorMs(slaveSide, mPhase, slaveFirstBeat, slaveBpm);
      setPhaseErr(prev => ({ ...prev, [slaveSide]: errMs }));
      AudioEngine.nudgePhase(slaveSide, mPhase, slaveFirstBeat, slaveBpm, baseRate);
    }, 100);
    return () => clearInterval(h);
  }, [
    deckA.synced, deckB.synced,
    deckA.playing, deckB.playing,
    deckA.track?.id, deckB.track?.id,
    deckA.track?.bpm, deckB.track?.bpm,
    deckA.track?.firstBeat, deckB.track?.firstBeat,
    deckA.track?.analyzing, deckB.track?.analyzing,
  ]);

  // ------ Progress ticker from AudioEngine (skip SC tracks — they update via onProgress) ------
  useEffect(() => {
    // Poll the engine ~12.5×/s. SoundCloud decks are skipped here because
    // their progress arrives via the widget's onProgress callback instead.
    const h = setInterval(() => {
      if (deckA.track && deckA.track.source !== 'soundcloud') {
        const ct = AudioEngine.getCurrentTime('a');
        // Prefer the engine's duration, fall back to track metadata, then 1
        // so the division below never hits zero.
        const dur = AudioEngine.getDuration('a') || deckA.track.duration || 1;
        const np = Math.max(0, Math.min(1, ct / dur));
        setDeckA(prev => {
          if (!prev.track) return prev;
          // Loop wrap: once playback crosses the loop end, jump back to the
          // loop start. NOTE(review): AudioEngine.seek is a side effect inside
          // a state updater — React may invoke updaters more than once (e.g.
          // StrictMode), which could double-seek; consider hoisting. TODO confirm.
          if (prev.loop && np >= prev.loop.end) {
            AudioEngine.seek('a', prev.loop.start * dur);
            return { ...prev, progress: prev.loop.start };
          }
          // Return the same object when nothing visible changed so React
          // skips the re-render.
          if (Math.abs(np - prev.progress) < 0.001 && prev.playing === AudioEngine.isPlaying('a')) return prev;
          // Treat >= 99.9% progress as "ended" so the play flag drops at track end.
          return { ...prev, progress: np, playing: AudioEngine.isPlaying('a') && np < 0.999 };
        });
      }
      // Mirror of the Deck A branch above, for Deck B.
      if (deckB.track && deckB.track.source !== 'soundcloud') {
        const ct = AudioEngine.getCurrentTime('b');
        const dur = AudioEngine.getDuration('b') || deckB.track.duration || 1;
        const np = Math.max(0, Math.min(1, ct / dur));
        setDeckB(prev => {
          if (!prev.track) return prev;
          if (prev.loop && np >= prev.loop.end) {
            AudioEngine.seek('b', prev.loop.start * dur);
            return { ...prev, progress: prev.loop.start };
          }
          if (Math.abs(np - prev.progress) < 0.001 && prev.playing === AudioEngine.isPlaying('b')) return prev;
          return { ...prev, progress: np, playing: AudioEngine.isPlaying('b') && np < 0.999 };
        });
      }
    }, 80);
    return () => clearInterval(h);
    // Re-subscribe only when a deck's track identity changes; progress and
    // loop state are always read fresh inside the updaters.
  }, [deckA.track?.id, deckB.track?.id]);

  // ------ VU meters from AudioEngine analyser (real levels) ------
  const [vuA, setVuA] = useState(0);
  const [vuB, setVuB] = useState(0);
  useEffect(() => {
    // Poll the engine's analyser ~16×/s; cheap enough to run for the app's
    // whole lifetime, so no dependencies and a single cleanup.
    const poll = () => {
      setVuA(AudioEngine.getLevel('a'));
      setVuB(AudioEngine.getLevel('b'));
    };
    const timer = setInterval(poll, 60);
    return () => clearInterval(timer);
  }, []);

  // ------ Automix ------
  // Behavior, beginner-friendly:
  //   1. START AUTOMIX with empty decks + queue tracks → auto-load q[0]→A (play),
  //      q[1]→B (cued). Crossfader snaps to A.
  //   2. Once outgoing deck passes 85% progress AND incoming deck has audio,
  //      start a smooth crossfade over `fadeTime` seconds.
  //   3. Once fade settles (crossfader fully on incoming side), clear the spent
  //      deck and pre-stage the next un-played queue track on it.
  //   4. When queue is exhausted → finish current fade and stop.
  // Tracks are skipped if SoundCloud (we can't auto-sync those reliably).
  // Mirror the automix state into a ref so long-lived interval callbacks can
  // always read the freshest value without re-subscribing on every change.
  const automixRef = useRef(automix);
  useEffect(() => {
    automixRef.current = automix;
  }, [automix]);

  // Pick the next track from queue that isn't playing on either deck and hasn't aired.
  const pickNextAutomix = (q, played, excludeIds) => {
    // A track qualifies when it is not already airing (excluded), has not
    // played before, is fully loaded and analyzed, and is a web-audio track.
    const eligible = (t) => {
      if (excludeIds.includes(t.id) || played.includes(t.id)) return false;
      if (t.loading || t.analyzing) return false;
      if (!t.buffer || !t.bpm) return false;
      return t.source !== 'soundcloud'; // smooth-sync only works on web-audio tracks
    };
    return q.find(eligible);
  };

  // When the user flips automix on, auto-stage decks. Does NOT need to wait for
  // the interval — we want immediate feedback ("ON AIR" → music starts now).
  useEffect(() => {
    if (!automix.on) {
      // turning off: leave decks as-is, just reset phase
      if (automix.phase !== 'idle') setAutomix(a => ({ ...a, phase: 'idle', fadeStart: 0 }));
      return;
    }
    if (queue.length === 0) {
      // Nothing to play — bounce the switch back off and tell the user why.
      flashToast('Add tracks to the queue first');
      setAutomix(a => ({ ...a, on: false }));
      return;
    }

    // 1) Both decks empty → load A (play) and B (cued)
    if (!deckA.track && !deckB.track) {
      const first = pickNextAutomix(queue, automix.playedIds, []);
      if (!first) {
        // Queue has entries but none are automixable (e.g. all SoundCloud).
        flashToast('No web-audio tracks to automix');
        setAutomix(a => ({ ...a, on: false }));
        return;
      }
      const second = pickNextAutomix(queue, automix.playedIds, [first.id]);
      loadTo('a', first).then(() => {
        setDeckA(p => ({ ...p, playing: true }));
        // Snap crossfader to A so the start isn't muffled
        setMixer(m => ({ ...m, crossfader: 0 }));
      });
      if (second) {
        // small delay so deck A loads first
        setTimeout(() => loadTo('b', second), 250);
      }
      // Record only the first track as aired; the second is merely staged.
      setAutomix(a => ({ ...a, phase: 'playing', playedIds: [...a.playedIds, first.id] }));
      flashToast('Automix started');
      return;
    }

    // 2) One deck has a track but nothing playing → start it
    if (deckA.track && !deckA.playing && !deckB.playing) {
      setDeckA(p => ({ ...p, playing: true }));
      setMixer(m => ({ ...m, crossfader: 0 }));
      // Mark the deck-A track as aired, guarding against a duplicate entry.
      setAutomix(a => ({ ...a, phase: 'playing', playedIds: a.playedIds.includes(deckA.track.id) ? a.playedIds : [...a.playedIds, deckA.track.id] }));
      // pre-stage B if empty
      if (!deckB.track) {
        const next = pickNextAutomix(queue, automix.playedIds.concat(deckA.track.id), [deckA.track.id]);
        if (next) loadTo('b', next);
      }
      return;
    }
    // Mirror of case 2 with the deck roles swapped (B playing, A staged).
    if (deckB.track && !deckB.playing && !deckA.playing) {
      setDeckB(p => ({ ...p, playing: true }));
      setMixer(m => ({ ...m, crossfader: 1 }));
      setAutomix(a => ({ ...a, phase: 'playing', playedIds: a.playedIds.includes(deckB.track.id) ? a.playedIds : [...a.playedIds, deckB.track.id] }));
      if (!deckA.track) {
        const next = pickNextAutomix(queue, automix.playedIds.concat(deckB.track.id), [deckB.track.id]);
        if (next) loadTo('a', next);
      }
      return;
    }
    // NOTE(review): deps are intentionally just [automix.on] — this effect
    // should fire only on the ON/OFF flip, reading whatever deck/queue state
    // exists at that moment; the interval effect below handles the rest.
    // TODO confirm this is deliberate (exhaustive-deps would flag it).
  }, [automix.on]);

  // Continuous automix tick — runs while automix is on
  useEffect(() => {
    if (!automix.on) return;
    const FADE_TRIGGER = 0.85; // outgoing must be past this to start fading
    const TICK = 50;

    const h = setInterval(() => {
      // Read flags through the ref so this closure sees fresh automix state
      // even between effect re-subscriptions.
      const a = automixRef.current;
      if (!a.on) return;

      const aProg = deckA.progress || 0;
      const bProg = deckB.progress || 0;
      const aHas = !!deckA.track;
      const bHas = !!deckB.track;
      const aPlay = aHas && deckA.playing;
      const bPlay = bHas && deckB.playing;

      // Determine outgoing (the deck currently dominating the crossfader)
      const xf = mixer.crossfader;
      const outgoing = xf < 0.5 ? 'a' : 'b';
      const incoming = outgoing === 'a' ? 'b' : 'a';
      const outProg = outgoing === 'a' ? aProg : bProg;
      const outHas = outgoing === 'a' ? aHas : bHas;
      const inHas = incoming === 'a' ? aHas : bHas;
      const inPlay = incoming === 'a' ? aPlay : bPlay;

      // If outgoing has nothing, but incoming has music → swap roles immediately
      if (!outHas && inHas) {
        setMixer(m => ({ ...m, crossfader: incoming === 'a' ? 0 : 1 }));
        return;
      }
      // If neither has music, try to load
      if (!outHas && !inHas) {
        const next = pickNextAutomix(queue, a.playedIds, []);
        if (!next) {
          flashToast('Automix: queue exhausted');
          setAutomix(s => ({ ...s, on: false, phase: 'done' }));
          return;
        }
        loadTo('a', next).then(() => setDeckA(p => ({ ...p, playing: true })));
        setMixer(m => ({ ...m, crossfader: 0 }));
        setAutomix(s => ({ ...s, playedIds: [...s.playedIds, next.id] }));
        return;
      }

      // Outgoing past trigger → begin/continue fade
      if (outHas && outProg > FADE_TRIGGER) {
        // Make sure incoming has a track + is playing
        if (!inHas) {
          const excludeIds = [deckA.track?.id, deckB.track?.id].filter(Boolean);
          const next = pickNextAutomix(queue, a.playedIds, excludeIds);
          if (next) {
            // Load into the incoming deck, start it, and engage tempo sync so
            // the rate/phase effects line it up with the outgoing deck.
            loadTo(incoming, next).then(() => {
              if (incoming === 'a') setDeckA(p => ({ ...p, playing: true, synced: true }));
              else setDeckB(p => ({ ...p, playing: true, synced: true }));
            });
            setAutomix(s => ({ ...s, playedIds: [...s.playedIds, next.id], phase: 'fading', fadeStart: s.fadeStart || Date.now() }));
          } else {
            // no more tracks — let outgoing finish, then stop
            if (outProg > 0.99) {
              flashToast('Automix: queue finished');
              setAutomix(s => ({ ...s, on: false, phase: 'done' }));
            }
          }
          return;
        }
        if (!inPlay) {
          // Incoming is staged but paused — start it (synced) before fading.
          if (incoming === 'a') setDeckA(p => p.track ? { ...p, playing: true, synced: true } : p);
          else setDeckB(p => p.track ? { ...p, playing: true, synced: true } : p);
        }
        // Glide crossfader toward incoming side
        setMixer(m => {
          const target = incoming === 'a' ? 0 : 1;
          // One tick's worth of travel so the full glide takes ~fadeTime seconds.
          const step = 1 / (a.fadeTime * (1000 / TICK));
          const dir = target > m.crossfader ? 1 : -1;
          const nc = m.crossfader + dir * step;
          const done = (dir > 0 && nc >= target) || (dir < 0 && nc <= target);
          return { ...m, crossfader: done ? target : Math.max(0, Math.min(1, nc)) };
        });
        if (a.phase !== 'fading') setAutomix(s => ({ ...s, phase: 'fading', fadeStart: s.fadeStart || Date.now() }));

        // Fade settled? Clear outgoing deck and pre-stage next
        // (mixer.crossfader here is fresh because it's in the effect deps,
        // so the interval is rebuilt whenever the fader moves.)
        const settled = (incoming === 'a' && mixer.crossfader < 0.02) || (incoming === 'b' && mixer.crossfader > 0.98);
        if (settled) {
          if (outgoing === 'a') {
            AudioEngine.pause('a');
            setDeckA(emptyDeck());
          } else {
            AudioEngine.pause('b');
            setDeckB(emptyDeck());
          }
          // Pre-stage the next track on the freed deck
          const excludeIds = [deckA.track?.id, deckB.track?.id].filter(Boolean);
          const nextUp = pickNextAutomix(queue, a.playedIds, excludeIds);
          if (nextUp) {
            // Small delay so the emptyDeck() state lands before reloading.
            setTimeout(() => loadTo(outgoing, nextUp), 200);
            setAutomix(s => ({ ...s, playedIds: [...s.playedIds, nextUp.id], phase: 'playing', fadeStart: 0 }));
          } else {
            setAutomix(s => ({ ...s, phase: 'playing', fadeStart: 0 }));
          }
        }
        return;
      }

      // No fade pending: if incoming deck is empty and we have queue, pre-stage it
      // so the next transition is instant.
      if (!inHas && a.phase === 'playing') {
        const excludeIds = [deckA.track?.id, deckB.track?.id].filter(Boolean);
        const next = pickNextAutomix(queue, a.playedIds, excludeIds);
        if (next) {
          loadTo(incoming, next);
          setAutomix(s => ({ ...s, playedIds: [...s.playedIds, next.id] }));
        }
      }
    }, TICK);
    return () => clearInterval(h);
    // NOTE(review): depending on whole deck objects + mixer.crossfader means
    // this interval is torn down and rebuilt on nearly every tick during a
    // fade. It works (the closure always sees fresh state) but churns timers.
  }, [automix.on, deckA, deckB, queue, mixer.crossfader]);

  // Load a track onto deck `side` ('a' | 'b'). Accepts either a track object
  // or a queue id (resolved against the current queue), then routes to the
  // SoundCloud-widget path or the Web-Audio path based on track.source.
  const loadTo = async (side, trackOrId) => {
    const setD = side === 'a' ? setDeckA : setDeckB;
    const track = (typeof trackOrId === 'string')
      ? queue.find(t => t.id === trackOrId)
      : trackOrId;
    if (!track || typeof track !== 'object') {
      flashToast('That track is not available anymore');
      return;
    }
    if (track.loading) {
      flashToast('Track is still loading');
      return;
    }
    if (track.source !== 'soundcloud' && !track.buffer) {
      flashToast('No playable audio found for that track');
      return;
    }
    if (track.source === 'soundcloud') {
      // Tear down any previous SC widget on this side, and stop web-audio side
      AudioEngine.pause(side);
      AudioEngine.setTrack(side, null);
      SoundCloudAdapter.destroy(side);
      // Reset the deck but keep the user's EQ/filter/FX settings.
      setD(p => ({ ...emptyDeck(), track: { ...track, duration: track.duration || 1 }, eq: p.eq, filter: p.filter, fx: p.fx }));
      flashToast(`Loading SoundCloud to Deck ${side.toUpperCase()}…`);
      try {
        const w = await SoundCloudAdapter.attachWidget(side, track.scUrl);
        // Apply current volume
        const vol = side === 'a' ? mixer.volA : mixer.volB;
        // Equal-power crossfade law: cos curve for deck A, sin for deck B.
        const xfader = side === 'a' ? Math.cos((mixer.crossfader * Math.PI) / 2) : Math.sin((mixer.crossfader * Math.PI) / 2);
        SoundCloudAdapter.setVolume(side, mixer.master * vol * xfader);
        // Subscribe to progress
        SoundCloudAdapter.onProgress(side, (pos, dur) => {
          const np = Math.max(0, Math.min(1, pos / (dur || 1)));
          setD(prev => {
            if (!prev.track) return prev;
            // Skip no-op updates so frequent progress callbacks don't thrash renders.
            if (Math.abs(np - prev.progress) < 0.001 && prev.playing === SoundCloudAdapter.isPlaying(side)) return prev;
            return { ...prev, progress: np, playing: SoundCloudAdapter.isPlaying(side) && np < 0.999 };
          });
        });
        // Patch real duration into the track
        setD(p => p.track ? { ...p, track: { ...p.track, duration: w.duration } } : p);
        flashToast(`Loaded SoundCloud to Deck ${side.toUpperCase()}`);
      } catch (e) {
        flashToast(`SoundCloud load failed: ${e.message}`);
      }
      return;
    }
    // Web Audio path
    SoundCloudAdapter.destroy(side);
    AudioEngine.setTrack(side, track.buffer);
    setD(p => ({ ...emptyDeck(), track, eq: p.eq, filter: p.filter, fx: p.fx }));
    // ✨ AUTO-MATCH: if the OTHER deck has a track loaded (anywhere — playing
    // or paused), auto-engage sync on this newly-loaded deck so the user
    // doesn't have to click MATCH manually. Skip if other deck is SoundCloud
    // (we can't sync those). Skip if user already has sync engaged elsewhere.
    const other = side === 'a' ? deckB : deckA;
    const canTempoLock = (t) => t && t.source !== 'soundcloud' && !t.loading && !t.analyzing && t.buffer && t.bpm;
    if (canTempoLock(track) && canTempoLock(other?.track)) {
      // Wait a beat for state to settle, then engage sync on the new deck.
      // The existing rate + phaseAlign effects pick this up automatically.
      setTimeout(() => {
        // Only engage if this deck still holds the track we just loaded.
        setD(p => p.track?.id === track.id ? { ...p, synced: true } : p);
      }, 120);
      flashToast(`Auto-matched to Deck ${side === 'a' ? 'B' : 'A'}`);
    } else if (track.analyzing) {
      flashToast(`Loaded to Deck ${side.toUpperCase()} - analyzing beats`);
    } else {
      flashToast(`Loaded to Deck ${side.toUpperCase()}`);
    }
  };
  // Drop a queue entry by id.
  const removeTrack = (id) => {
    setQueue((q) => q.filter((t) => t.id !== id));
  };
  // Move the queue entry at index `from` to index `to` (drag-reorder).
  const reorder = (from, to) => {
    setQueue((q) => {
      const next = [...q];
      const [moved] = next.splice(from, 1);
      next.splice(to, 0, moved);
      return next;
    });
  };

  // Scrub handler — seek the real audio
  const handleDeckChange = (side) => (next) => {
    const prev = side === 'a' ? deckA : deckB;
    const setD = side === 'a' ? setDeckA : setDeckB;
    // Seek the real audio when the user scrubs (large progress delta) or when
    // the loop boundaries change. The progress tick's own loop-wrap already
    // issues its own AudioEngine.seek, so it's excluded via the small delta.
    const loopArmed =
      next.loop?.start !== prev.loop?.start || next.loop?.end !== prev.loop?.end;
    const scrubbed = Math.abs(next.progress - prev.progress) > 0.02;
    const samePlayState = !prev.playing === !next.playing;
    if (next.track && (scrubbed || loopArmed) && samePlayState) {
      const secs = next.progress * (next.track.duration || 1);
      if (next.track.source === 'soundcloud') {
        SoundCloudAdapter.seek(side, secs);
      } else {
        AudioEngine.seek(side, secs);
      }
    }
    // If the deck mutated track metadata (manual grid nudge), mirror the same
    // patch back into the queue so it survives a reload-into-deck.
    const trackPatched =
      next.track && prev.track &&
      next.track.id === prev.track.id &&
      next.track !== prev.track;
    if (trackPatched) {
      const patched = next.track;
      setQueue((q) =>
        q.map((qt) =>
          qt.id === patched.id
            ? {
                ...qt,
                firstBeat: patched.firstBeat,
                bpm: patched.bpm,
                __origBpm: patched.__origBpm ?? patched.bpm,
                beatGrid: patched.beatGrid ?? qt.beatGrid,
                analyzeConfidence: patched.analyzeConfidence ?? qt.analyzeConfidence,
              }
            : qt
        )
      );
    }
    setD(next);
  };

  // Keyboard
  useEffect(() => {
    // Global transport shortcuts (Q/P play-pause, W/O cue, Space automix).
    const onKey = (e) => {
      // Ignore keystrokes while the user is typing. The old guard only
      // checked for <input>, so Space would toggle automix (and Q/P would
      // flip the decks) while typing in a textarea, select, or any
      // contentEditable region.
      const t = e.target;
      if (t.tagName === 'INPUT' || t.tagName === 'TEXTAREA' || t.tagName === 'SELECT' || t.isContentEditable) return;
      if (e.code === 'KeyQ') setDeckA(p => p.track ? { ...p, playing: !p.playing } : p);
      if (e.code === 'KeyP') setDeckB(p => p.track ? { ...p, playing: !p.playing } : p);
      if (e.code === 'KeyW') setDeckA(p => p.track ? { ...p, cue: p.progress } : p);
      if (e.code === 'KeyO') setDeckB(p => p.track ? { ...p, cue: p.progress } : p);
      // preventDefault stops Space from also scrolling the page.
      if (e.code === 'Space') { e.preventDefault(); setAutomix(a => ({ ...a, on: !a.on })); }
    };
    window.addEventListener('keydown', onKey);
    return () => window.removeEventListener('keydown', onKey);
  }, []);

  // Hidden file input
  // Ref to the hidden <input type="file"> rendered below; the Upload button
  // forwards clicks to it.
  const fileInputRef = useRef(null);
  const openFilePicker = () => {
    fileInputRef.current?.click();
  };

  // Global drag & drop
  useEffect(() => {
    // preventDefault on dragover is required for the drop event to fire at all.
    const handleDragOver = (ev) => {
      ev.preventDefault();
    };
    const handleDrop = async (ev) => {
      ev.preventDefault();
      const files = ev.dataTransfer?.files;
      if (files?.length) {
        await addFiles(files);
      }
    };
    window.addEventListener('dragover', handleDragOver);
    window.addEventListener('drop', handleDrop);
    return () => {
      window.removeEventListener('dragover', handleDragOver);
      window.removeEventListener('drop', handleDrop);
    };
  }, []);

  // ------ Beat-jump helper: respects quantize division (defaults to 1 beat) ------
  const beatJump = (side, beats) => {
    const deck = side === 'a' ? deckA : deckB;
    const setD = side === 'a' ? setDeckA : setDeckB;
    // SoundCloud decks have no engine-side beat grid to jump within.
    if (!deck.track || deck.track.source === 'soundcloud') return;
    AudioEngine.beatJump(side, beats, deck.track.firstBeat || 0, deck.track.bpm);
    // Immediately reflect the engine's new position in deck state.
    const dur = AudioEngine.getDuration(side) || deck.track.duration || 1;
    const pos = AudioEngine.getCurrentTime(side);
    const clamped = Math.max(0, Math.min(1, pos / dur));
    setD(p => ({ ...p, progress: clamped }));
  };

  // ------ Manual phase-align: nudge the slave deck so its beats line up
  // with the master deck right now. Used by the "Fix beats" button in each deck. ------
  const alignNow = (slaveSide) => {
    const masterSide = slaveSide === 'a' ? 'b' : 'a';
    const [slave, master] = slaveSide === 'a' ? [deckA, deckB] : [deckB, deckA];
    if (!slave.track || !master.track) return;
    if (slave.track.source === 'soundcloud' || master.track.source === 'soundcloud') return;
    const notReady =
      slave.track.analyzing || master.track.analyzing ||
      !slave.track.bpm || !master.track.bpm;
    if (notReady) {
      flashToast('Beat analysis is still running');
      return;
    }
    // Snap the slave's playback phase onto the master's current beat phase.
    const mPhase = AudioEngine.getBeatPhase(masterSide, master.track.firstBeat || 0, master.track.bpm);
    const origBpm = slave.track.__origBpm ?? slave.track.bpm;
    const baseRate = origBpm ? master.track.bpm / origBpm : 1;
    AudioEngine.phaseAlign(slaveSide, mPhase, slave.track.firstBeat || 0, slave.track.bpm, baseRate);
  };

  // ---- Render ----
  // Layout, top to bottom: TopBar; a hidden <input type="file"> triggered via
  // openFilePicker (value is cleared after each pick so re-selecting the same
  // file fires onChange again); the main three-panel area (Deck A | Mixer |
  // Deck B); a tabbed sidebar (Queue / Spotify); a one-time "audio gate"
  // overlay shown until the user clicks (browsers require a user gesture
  // before an AudioContext may produce sound — clicking also seeds the demo
  // tracks); a transient toast; a keyboard-hint chip with a help button; and
  // the help modal.
  return (
    <div className="app">
      <TopBar onRecToggle={() => setRecording(r => !r)} recording={recording}
              onUpload={openFilePicker} onLoadDemos={seedDemos} loadingDemos={loadingDemos} />
      <input ref={fileInputRef} type="file" accept="audio/*" multiple style={{ display: 'none' }}
             onChange={(e) => { addFiles(e.target.files); e.target.value = ''; }} />
      <div className="main">
        <Deck side="a" state={deckA} onChange={handleDeckChange('a')} otherState={deckB} onOtherChange={handleDeckChange('b')} automix={automix} isMaster={effectiveMaster === 'a'} masterPinned={masterDeck === 'a'} onSetMaster={() => setMasterDeck(masterDeck === 'a' ? null : 'a')} onLoadFiles={addFilesToDeck} queue={queue} onLoadFromQueue={loadTo} quantize={quantize} phaseErrMs={phaseErr.a} onBeatJump={(b) => beatJump('a', b)} onAlignNow={() => alignNow('a')} onYoutubeUrl={handleYoutubeUrl} onLoadDemos={seedDemos} loadingDemos={loadingDemos} onOpenSpotify={() => setSideTab('spotify')} />
        <Mixer deckA={deckA} deckB={deckB} mixer={mixer} onMixerChange={setMixer} vuA={vuA} vuB={vuB} quantize={quantize} onQuantizeChange={setQuantize} masterDeck={effectiveMaster} />
        <Deck side="b" state={deckB} onChange={handleDeckChange('b')} otherState={deckA} onOtherChange={handleDeckChange('a')} automix={automix} isMaster={effectiveMaster === 'b'} masterPinned={masterDeck === 'b'} onSetMaster={() => setMasterDeck(masterDeck === 'b' ? null : 'b')} onLoadFiles={addFilesToDeck} queue={queue} onLoadFromQueue={loadTo} quantize={quantize} phaseErrMs={phaseErr.b} onBeatJump={(b) => beatJump('b', b)} onAlignNow={() => alignNow('b')} onYoutubeUrl={handleYoutubeUrl} onLoadDemos={seedDemos} loadingDemos={loadingDemos} onOpenSpotify={() => setSideTab('spotify')} />
      </div>
      <div className="sidebar">
        <div className="sidebar-tabs">
          <button
            className={`sidebar-tab ${sideTab === 'queue' ? 'active' : ''}`}
            onClick={() => setSideTab('queue')}
          >
            <Icon name="list" size={12} /> Queue
            {queue.length > 0 && <span className="sidebar-tab-count">{queue.length}</span>}
          </button>
          <button
            className={`sidebar-tab ${sideTab === 'spotify' ? 'active' : ''}`}
            onClick={() => setSideTab('spotify')}
          >
            <span className="sidebar-sp-logo" /> Spotify
            {SpotifyClient.isConnected() && <span className="sidebar-tab-dot" />}
          </button>
        </div>
        <div className="sidebar-body">
          {sideTab === 'queue' && (
            <Queue
              queue={queue}
              onLoadTo={loadTo}
              onRemove={removeTrack}
              onReorder={reorder}
              automix={automix}
              onAutomixToggle={() => setAutomix(a => ({ ...a, on: !a.on, fadeStart: 0 }))}
              onFadeTimeChange={(s) => setAutomix(a => ({ ...a, fadeTime: s }))}
              deckA={deckA}
              deckB={deckB}
            />
          )}
          {sideTab === 'spotify' && (
            <SpotifyPanel onAddToQueue={handleSpotifyHandoff} />
          )}
        </div>
      </div>

      {!audioStarted && (
        <div className="audio-gate" onClick={() => {
          AudioEngine.resumeCtx();
          setAudioStarted(true);
          // auto-seed demos so user hears something immediately
          seedDemos();
        }}>
          <div className="audio-gate-card">
            <div className="ag-mark"><Icon name="play" size={20}/></div>
            <div>
              <div className="ag-title">Welcome to djanything.com</div>
              <div className="ag-sub">Mix anything on the internet. Click to start — we'll load 4 demo tracks so you can mix right away.<br/>Drag & drop MP3s, or hit Upload to add your own.</div>
            </div>
          </div>
        </div>
      )}

      {toast && <div className="toast">{toast}</div>}
      <div className="hint-chip" title="Drag any track from the queue onto a deck to load it. Or drop MP3/WAV files anywhere on the page.">
        <kbd title="Play / pause Deck A">Q</kbd>/<kbd title="Play / pause Deck B">P</kbd> play · <kbd title="Start or stop the auto-mixer">␣</kbd> automix · Drop tracks anywhere to add
        <button
          className="hint-help"
          title="Open quick start guide"
          onClick={() => setHelpOpen(true)}
          aria-label="Help"
        >?</button>
      </div>
      {helpOpen && <HelpModal onClose={() => setHelpOpen(false)} />}
    </div>
  );
}

ReactDOM.createRoot(document.getElementById('root')).render(<App />);
