add system audio loopback for webaudio

This commit is contained in:
jeffvli
2026-04-05 00:48:38 -07:00
parent 25bb7f7069
commit 94886a2d5a
12 changed files with 257 additions and 28 deletions
+2
View File
@@ -1214,6 +1214,8 @@
"mainText": "drop a file here" "mainText": "drop a file here"
}, },
"visualizer": { "visualizer": {
"systemAudioCaptureFailed": "Could not start capture: {{message}}",
"systemAudioNoAudioTrack": "No audio track was returned. Ensure audio capture is enabled when prompted.",
"visualizerType": "Visualizer Type", "visualizerType": "Visualizer Type",
"cyclePresets": "Cycle Presets", "cyclePresets": "Cycle Presets",
"cycleTime": "Cycle Time (seconds)", "cycleTime": "Cycle Time (seconds)",
+17
View File
@@ -5,6 +5,7 @@ import {
app, app,
BrowserWindow, BrowserWindow,
BrowserWindowConstructorOptions, BrowserWindowConstructorOptions,
desktopCapturer,
globalShortcut, globalShortcut,
ipcMain, ipcMain,
Menu, Menu,
@@ -732,6 +733,22 @@ async function createWindow(first = true): Promise<void> {
return { action: 'deny' }; return { action: 'deny' };
}); });
mainWindow.webContents.session.setDisplayMediaRequestHandler((_request, callback) => {
desktopCapturer
.getSources({ types: ['screen'] })
.then((sources) => {
if (sources.length > 0) {
callback({ audio: 'loopback', video: sources[0] });
} else {
callback({});
}
})
.catch((err) => {
log.warn('desktopCapturer.getSources failed', err);
callback({});
});
});
if (!disableAutoUpdates() && store.get('disable_auto_updates') !== true) { if (!disableAutoUpdates() && store.get('disable_auto_updates') !== true) {
new AppUpdater(); new AppUpdater();
} }
+2
View File
@@ -11,6 +11,7 @@ import { lazy, memo, Suspense, useEffect, useMemo, useRef, useState } from 'reac
import i18n from '/@/i18n/i18n'; import i18n from '/@/i18n/i18n';
import { WebAudioContext } from '/@/renderer/features/player/context/webaudio-context'; import { WebAudioContext } from '/@/renderer/features/player/context/webaudio-context';
import { VisualizerSystemAudioBridge } from '/@/renderer/features/visualizer/components/visualizer-system-audio-bridge';
import { useCheckForUpdates } from '/@/renderer/hooks/use-check-for-updates'; import { useCheckForUpdates } from '/@/renderer/hooks/use-check-for-updates';
import { useNativeMenuSync } from '/@/renderer/hooks/use-native-menu-sync'; import { useNativeMenuSync } from '/@/renderer/hooks/use-native-menu-sync';
import { useSyncSettingsToMain } from '/@/renderer/hooks/use-sync-settings-to-main'; import { useSyncSettingsToMain } from '/@/renderer/hooks/use-sync-settings-to-main';
@@ -79,6 +80,7 @@ const AppShell = memo(function AppShell() {
<WebAudioContext.Provider value={webAudioProvider}> <WebAudioContext.Provider value={webAudioProvider}>
<PlayerProvider> <PlayerProvider>
<AudioPlayers /> <AudioPlayers />
<VisualizerSystemAudioBridge />
<AppRouter /> <AppRouter />
</PlayerProvider> </PlayerProvider>
</WebAudioContext.Provider> </WebAudioContext.Provider>
@@ -27,7 +27,7 @@ import {
import { ActionIcon, ActionIconGroup } from '/@/shared/components/action-icon/action-icon'; import { ActionIcon, ActionIconGroup } from '/@/shared/components/action-icon/action-icon';
import { Flex } from '/@/shared/components/flex/flex'; import { Flex } from '/@/shared/components/flex/flex';
import { Stack } from '/@/shared/components/stack/stack'; import { Stack } from '/@/shared/components/stack/stack';
import { ItemListKey, Platform, PlayerType } from '/@/shared/types/types'; import { ItemListKey, Platform } from '/@/shared/types/types';
type SidebarPanelType = 'lyrics' | 'queue' | 'visualizer'; type SidebarPanelType = 'lyrics' | 'queue' | 'visualizer';
@@ -55,9 +55,9 @@ export const SidebarPlayQueue = () => {
const showLyricsInSidebar = useShowLyricsInSidebar(); const showLyricsInSidebar = useShowLyricsInSidebar();
const showVisualizerInSidebar = useShowVisualizerInSidebar(); const showVisualizerInSidebar = useShowVisualizerInSidebar();
const sidebarPanelOrder = useSidebarPanelOrder(); const sidebarPanelOrder = useSidebarPanelOrder();
const { type, webAudio } = usePlaybackSettings(); const { webAudio } = usePlaybackSettings();
const { windowBarStyle } = useWindowSettings(); const { windowBarStyle } = useWindowSettings();
const showVisualizer = showVisualizerInSidebar && type === PlayerType.WEB && webAudio; const showVisualizer = showVisualizerInSidebar && webAudio;
const showPanel = showLyricsInSidebar || showVisualizer; const showPanel = showLyricsInSidebar || showVisualizer;
const shouldAddTopMargin = isElectron() && windowBarStyle === Platform.WEB; const shouldAddTopMargin = isElectron() && windowBarStyle === Platform.WEB;
@@ -374,8 +374,8 @@ const CombinedLyricsAndVisualizerPanel = () => {
const visualizerType = useSettingsStore((store) => store.visualizer.type); const visualizerType = useSettingsStore((store) => store.visualizer.type);
const showLyricsInSidebar = useShowLyricsInSidebar(); const showLyricsInSidebar = useShowLyricsInSidebar();
const showVisualizerInSidebar = useShowVisualizerInSidebar(); const showVisualizerInSidebar = useShowVisualizerInSidebar();
const { type, webAudio } = usePlaybackSettings(); const { webAudio } = usePlaybackSettings();
const showVisualizer = showVisualizerInSidebar && type === PlayerType.WEB && webAudio; const showVisualizer = showVisualizerInSidebar && webAudio;
const { data: lyricsData } = useQuery( const { data: lyricsData } = useQuery(
lyricsQueries.songLyrics( lyricsQueries.songLyrics(
@@ -15,7 +15,7 @@ import {
} from '/@/renderer/store/full-screen-player.store'; } from '/@/renderer/store/full-screen-player.store';
import { Button } from '/@/shared/components/button/button'; import { Button } from '/@/shared/components/button/button';
import { Group } from '/@/shared/components/group/group'; import { Group } from '/@/shared/components/group/group';
import { ItemListKey, PlayerType } from '/@/shared/types/types'; import { ItemListKey } from '/@/shared/types/types';
const AudioMotionAnalyzerVisualizer = lazy(() => const AudioMotionAnalyzerVisualizer = lazy(() =>
import('../../visualizer/components/audiomotionanalyzer/visualizer').then((module) => ({ import('../../visualizer/components/audiomotionanalyzer/visualizer').then((module) => ({
@@ -33,7 +33,7 @@ export const FullScreenPlayerQueue = () => {
const { t } = useTranslation(); const { t } = useTranslation();
const { activeTab, opacity } = useFullScreenPlayerStore(); const { activeTab, opacity } = useFullScreenPlayerStore();
const { setStore } = useFullScreenPlayerStoreActions(); const { setStore } = useFullScreenPlayerStoreActions();
const { type, webAudio } = usePlaybackSettings(); const { webAudio } = usePlaybackSettings();
const visualizerType = useSettingsStore((store) => store.visualizer.type); const visualizerType = useSettingsStore((store) => store.visualizer.type);
const headerItems = useMemo(() => { const headerItems = useMemo(() => {
@@ -55,7 +55,7 @@ export const FullScreenPlayerQueue = () => {
}, },
]; ];
if (type === PlayerType.WEB && webAudio) { if (webAudio) {
items.push({ items.push({
active: activeTab === 'visualizer', active: activeTab === 'visualizer',
label: t('page.fullscreenPlayer.visualizer', { postProcess: 'titleCase' }), label: t('page.fullscreenPlayer.visualizer', { postProcess: 'titleCase' }),
@@ -64,7 +64,7 @@ export const FullScreenPlayerQueue = () => {
} }
return items; return items;
}, [activeTab, setStore, t, type, webAudio]); }, [activeTab, setStore, t, webAudio]);
return ( return (
<div <div
@@ -119,7 +119,7 @@ export const FullScreenPlayerQueue = () => {
</div> </div>
) : activeTab === 'lyrics' ? ( ) : activeTab === 'lyrics' ? (
<Lyrics fadeOutNoLyricsMessage={false} /> <Lyrics fadeOutNoLyricsMessage={false} />
) : activeTab === 'visualizer' && type === PlayerType.WEB && webAudio ? ( ) : activeTab === 'visualizer' && webAudio ? (
<Suspense fallback={<></>}> <Suspense fallback={<></>}>
{visualizerType === 'butterchurn' ? ( {visualizerType === 'butterchurn' ? (
<ButterchurnVisualizer /> <ButterchurnVisualizer />
@@ -13,7 +13,7 @@ import {
useWindowSettings, useWindowSettings,
} from '/@/renderer/store/settings.store'; } from '/@/renderer/store/settings.store';
import { useHotkeys } from '/@/shared/hooks/use-hotkeys'; import { useHotkeys } from '/@/shared/hooks/use-hotkeys';
import { Platform, PlayerType } from '/@/shared/types/types'; import { Platform } from '/@/shared/types/types';
const AudioMotionAnalyzerVisualizer = lazy(() => const AudioMotionAnalyzerVisualizer = lazy(() =>
import('../../visualizer/components/audiomotionanalyzer/visualizer').then((module) => ({ import('../../visualizer/components/audiomotionanalyzer/visualizer').then((module) => ({
@@ -131,7 +131,7 @@ VisualizerContainer.displayName = 'VisualizerContainer';
export const FullScreenVisualizer = () => { export const FullScreenVisualizer = () => {
const { setStore } = useFullScreenPlayerStoreActions(); const { setStore } = useFullScreenPlayerStoreActions();
const { windowBarStyle } = useWindowSettings(); const { windowBarStyle } = useWindowSettings();
const { type, webAudio } = usePlaybackSettings(); const { webAudio } = usePlaybackSettings();
const visualizerType = useSettingsStore((store) => store.visualizer.type); const visualizerType = useSettingsStore((store) => store.visualizer.type);
const isMobile = useIsMobile(); const isMobile = useIsMobile();
@@ -155,7 +155,7 @@ export const FullScreenVisualizer = () => {
return ( return (
<VisualizerContainer isMobile={isMobile} windowBarStyle={windowBarStyle}> <VisualizerContainer isMobile={isMobile} windowBarStyle={windowBarStyle}>
<div className={styles.visualizerContainer}> <div className={styles.visualizerContainer}>
{type === PlayerType.WEB && webAudio ? ( {webAudio ? (
<Suspense fallback={<></>}> <Suspense fallback={<></>}>
{visualizerType === 'butterchurn' ? ( {visualizerType === 'butterchurn' ? (
<ButterchurnVisualizer /> <ButterchurnVisualizer />
@@ -0,0 +1,138 @@
import isElectron from 'is-electron';
import { useCallback, useEffect, useRef } from 'react';
import i18n from '/@/i18n/i18n';
import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio';
import { usePlaybackType } from '/@/renderer/store/settings.store';
import { toast } from '/@/shared/components/toast/toast';
import { PlayerType } from '/@/shared/types/types';
export function useVisualizerSystemAudio() {
    const playbackType = usePlaybackType();
    const { setWebAudio, webAudio } = useWebAudio();

    // Latest WebAudio snapshot, readable from async callbacks without re-subscribing.
    const webAudioRef = useRef(webAudio);
    const streamRef = useRef<MediaStream | null>(null);
    const sourceRef = useRef<MediaStreamAudioSourceNode | null>(null);
    const connectInFlightRef = useRef(false);
    // Monotonic session counter. disconnect() bumps it so that a connect() that
    // was still awaiting getDisplayMedia when the session was torn down can see
    // it resolved too late and discard its stream, instead of installing a
    // capture that no later disconnect() will ever clean up.
    const sessionEpochRef = useRef(0);

    useEffect(() => {
        webAudioRef.current = webAudio;
    }, [webAudio]);

    /** Stops the capture stream, detaches the source node, and clears visualizerInputs. */
    const disconnect = useCallback(() => {
        // Invalidate any connect() still awaiting the capture prompt.
        sessionEpochRef.current += 1;
        if (streamRef.current) {
            streamRef.current.getTracks().forEach((t) => t.stop());
            streamRef.current = null;
        }
        if (sourceRef.current) {
            try {
                sourceRef.current.disconnect();
            } catch {
                // ignore — node may already be detached or its context closed
            }
            sourceRef.current = null;
        }
        const w = webAudioRef.current;
        if (w?.visualizerInputs?.length && setWebAudio) {
            const next = { ...w, visualizerInputs: undefined };
            setWebAudio(next);
            webAudioRef.current = next;
        }
    }, [setWebAudio]);

    // Loopback capture is only needed while the LOCAL (mpv) player is active;
    // tear it down for any other playback type (was: WEB only — equivalent
    // today, but robust if more player types are added).
    useEffect(() => {
        if (playbackType !== PlayerType.LOCAL) {
            disconnect();
        }
    }, [playbackType, disconnect]);

    /**
     * Starts a system-audio loopback capture (in Electron the getDisplayMedia
     * prompt is resolved by the main process' display-media request handler,
     * which supplies `audio: 'loopback'`) and publishes the resulting source
     * node via webAudio.visualizerInputs for the visualizers to tap.
     */
    const connect = useCallback(async () => {
        if (!isElectron()) {
            return;
        }
        const w = webAudioRef.current;
        if (!w?.context || w.context.state === 'closed') {
            return;
        }
        if (!setWebAudio) return;

        // Reset any previous capture; this also bumps the session epoch.
        disconnect();
        const wAfterDisconnect = webAudioRef.current;
        if (!wAfterDisconnect?.context || wAfterDisconnect.context.state === 'closed') {
            return;
        }

        const epoch = sessionEpochRef.current;
        connectInFlightRef.current = true;
        try {
            const stream = await navigator.mediaDevices.getDisplayMedia({
                audio: true,
                video: true,
            });

            // disconnect() ran while the prompt was open (e.g. playback switched
            // back to the web player) — this session is stale, drop the stream.
            if (epoch !== sessionEpochRef.current) {
                stream.getTracks().forEach((t) => t.stop());
                return;
            }

            const audioTracks = stream.getAudioTracks();
            if (audioTracks.length === 0) {
                stream.getTracks().forEach((t) => t.stop());
                toast.error({ message: i18n.t('visualizer.systemAudioNoAudioTrack') });
                return;
            }

            const latest = webAudioRef.current;
            if (!latest?.context || latest.context.state === 'closed') {
                stream.getTracks().forEach((t) => t.stop());
                return;
            }

            try {
                await latest.context.resume();
            } catch {
                // ignore — resume() may reject transiently; capture can still attach
            }

            // Re-check staleness after the second await before installing.
            if (epoch !== sessionEpochRef.current) {
                stream.getTracks().forEach((t) => t.stop());
                return;
            }

            const source = latest.context.createMediaStreamSource(stream);
            streamRef.current = stream;
            sourceRef.current = source;

            const next = { ...latest, visualizerInputs: [source] };
            setWebAudio(next);
            webAudioRef.current = next;
        } catch (e) {
            const name = (e as DOMException)?.name;
            // User dismissed or denied the capture prompt — not worth surfacing.
            if (name === 'NotAllowedError' || name === 'AbortError') {
                return;
            }
            toast.error({
                message: i18n.t('visualizer.systemAudioCaptureFailed', {
                    message: (e as Error).message,
                }),
            });
        } finally {
            connectInFlightRef.current = false;
        }
    }, [disconnect, setWebAudio]);

    // Stable reference so the auto-connect effect below does not re-run (and
    // re-prompt) every time connect's identity changes.
    const connectRef = useRef(connect);
    connectRef.current = connect;

    // Auto-connect once per local-playback session: only in Electron, only when
    // a live AudioContext exists, and only if no capture is installed or pending.
    useEffect(() => {
        if (playbackType !== PlayerType.LOCAL || !isElectron()) {
            return;
        }
        const w = webAudioRef.current;
        if (!w?.context || w.context.state === 'closed') {
            return;
        }
        if (w.visualizerInputs?.length) {
            return;
        }
        if (connectInFlightRef.current) {
            return;
        }
        void connectRef.current();
        // eslint-disable-next-line react-hooks/exhaustive-deps
    }, [playbackType, webAudio?.context, webAudio?.visualizerInputs?.length]);
}
@@ -0,0 +1,14 @@
import type { WebAudio } from '/@/shared/types/types';
import { PlayerType } from '/@/shared/types/types';
/**
 * Resolves which audio nodes the visualizers should connect to.
 *
 * For LOCAL (mpv) playback the audio never flows through the web audio gain
 * graph, so visualizers tap the loopback-capture inputs instead (if any);
 * for every other playback type they tap the player's gain nodes.
 *
 * @param webAudio - current WebAudio snapshot, or undefined if not initialized
 * @param playbackType - active player type
 * @returns the nodes to feed into a visualizer; empty when nothing is available
 */
export function getVisualizerAudioNodes(
    webAudio: undefined | WebAudio,
    playbackType: PlayerType,
): AudioNode[] {
    if (!webAudio) {
        return [];
    }
    const usesLoopbackCapture = playbackType === PlayerType.LOCAL;
    return usesLoopbackCapture ? (webAudio.visualizerInputs ?? []) : webAudio.gains;
}
@@ -3,9 +3,10 @@ import { createRef, useCallback, useEffect, useMemo, useRef, useState } from 're
import styles from './visualizer.module.css'; import styles from './visualizer.module.css';
import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio'; import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio';
import { getVisualizerAudioNodes } from '/@/renderer/features/player/utils/get-visualizer-audio-nodes';
import { openVisualizerSettingsModal } from '/@/renderer/features/player/utils/open-visualizer-settings-modal'; import { openVisualizerSettingsModal } from '/@/renderer/features/player/utils/open-visualizer-settings-modal';
import { ComponentErrorBoundary } from '/@/renderer/features/shared/components/component-error-boundary'; import { ComponentErrorBoundary } from '/@/renderer/features/shared/components/component-error-boundary';
import { useAccent, useSettingsStore } from '/@/renderer/store'; import { useAccent, usePlaybackType, useSettingsStore } from '/@/renderer/store';
import { import {
useFullScreenPlayerStore, useFullScreenPlayerStore,
useFullScreenPlayerStoreActions, useFullScreenPlayerStoreActions,
@@ -18,6 +19,7 @@ const VisualizerInner = () => {
const canvasRef = createRef<HTMLDivElement>(); const canvasRef = createRef<HTMLDivElement>();
const accent = useAccent(); const accent = useAccent();
const visualizer = useSettingsStore((store) => store.visualizer); const visualizer = useSettingsStore((store) => store.visualizer);
const playbackType = usePlaybackType();
const opacity = useSettingsStore((store) => store.visualizer.audiomotionanalyzer.opacity); const opacity = useSettingsStore((store) => store.visualizer.audiomotionanalyzer.opacity);
const [motion, setMotion] = useState<any>(); const [motion, setMotion] = useState<any>();
const [libraryLoaded, setLibraryLoaded] = useState(false); const [libraryLoaded, setLibraryLoaded] = useState(false);
@@ -214,9 +216,10 @@ const VisualizerInner = () => {
); );
useEffect(() => { useEffect(() => {
const { context, gains } = webAudio || {}; const { context } = webAudio || {};
const inputNodes = getVisualizerAudioNodes(webAudio, playbackType);
let audioMotion: any | undefined; let audioMotion: any | undefined;
if (gains && context && canvasRef.current && !motion && libraryLoaded) { if (inputNodes.length > 0 && context && canvasRef.current && !motion && libraryLoaded) {
const AudioMotionAnalyzer = AudioMotionAnalyzerRef.current; const AudioMotionAnalyzer = AudioMotionAnalyzerRef.current;
if (!AudioMotionAnalyzer) return; if (!AudioMotionAnalyzer) return;
@@ -249,7 +252,7 @@ const VisualizerInner = () => {
registerCustomGradients(audioMotion); registerCustomGradients(audioMotion);
setMotion(audioMotion); setMotion(audioMotion);
for (const gain of gains) audioMotion.connectInput(gain); for (const node of inputNodes) audioMotion.connectInput(node);
} }
return () => { return () => {
@@ -262,6 +265,7 @@ const VisualizerInner = () => {
accent, accent,
canvasRef, canvasRef,
registerCustomGradients, registerCustomGradients,
playbackType,
webAudio, webAudio,
visualizer, visualizer,
options, options,
@@ -3,11 +3,13 @@ import { createRef, useCallback, useEffect, useRef, useState } from 'react';
import styles from './visualizer.module.css'; import styles from './visualizer.module.css';
import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio'; import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio';
import { getVisualizerAudioNodes } from '/@/renderer/features/player/utils/get-visualizer-audio-nodes';
import { openVisualizerSettingsModal } from '/@/renderer/features/player/utils/open-visualizer-settings-modal'; import { openVisualizerSettingsModal } from '/@/renderer/features/player/utils/open-visualizer-settings-modal';
import { ComponentErrorBoundary } from '/@/renderer/features/shared/components/component-error-boundary'; import { ComponentErrorBoundary } from '/@/renderer/features/shared/components/component-error-boundary';
import { import {
subscribeButterchurnPreset, subscribeButterchurnPreset,
useButterchurnSettings, useButterchurnSettings,
usePlaybackType,
useSettingsStore, useSettingsStore,
useSettingsStoreActions, useSettingsStoreActions,
} from '/@/renderer/store'; } from '/@/renderer/store';
@@ -19,7 +21,7 @@ import { usePlayerStatus } from '/@/renderer/store/player.store';
import { ActionIcon } from '/@/shared/components/action-icon/action-icon'; import { ActionIcon } from '/@/shared/components/action-icon/action-icon';
import { Group } from '/@/shared/components/group/group'; import { Group } from '/@/shared/components/group/group';
import { Text } from '/@/shared/components/text/text'; import { Text } from '/@/shared/components/text/text';
import { PlayerStatus } from '/@/shared/types/types'; import { PlayerStatus, PlayerType } from '/@/shared/types/types';
// Ignore presets that are erroring out // Ignore presets that are erroring out
const IGNORED_PRESETS = ['Flexi + Martin - astral projection']; const IGNORED_PRESETS = ['Flexi + Martin - astral projection'];
@@ -56,9 +58,14 @@ const VisualizerInner = () => {
const initialPresetLoadedRef = useRef(false); const initialPresetLoadedRef = useRef(false);
const butterchurnSettings = useButterchurnSettings(); const butterchurnSettings = useButterchurnSettings();
const opacity = useSettingsStore((store) => store.visualizer.butterchurn.opacity); const opacity = useSettingsStore((store) => store.visualizer.butterchurn.opacity);
const playbackType = usePlaybackType();
const { setSettings } = useSettingsStoreActions(); const { setSettings } = useSettingsStoreActions();
const playerStatus = usePlayerStatus(); const playerStatus = usePlayerStatus();
const isPlaying = playerStatus === PlayerStatus.PLAYING; const isPlaying = playerStatus === PlayerStatus.PLAYING;
const [webInitGeneration, setWebInitGeneration] = useState(0);
const wasPlayingRef = useRef(false);
const isFirstWebMountRef = useRef(true);
const prevPlaybackTypeRef = useRef(playbackType);
useEffect(() => { useEffect(() => {
let isMounted = true; let isMounted = true;
@@ -89,6 +96,32 @@ const VisualizerInner = () => {
}; };
}, []); }, []);
useEffect(() => {
const prevType = prevPlaybackTypeRef.current;
if (playbackType !== PlayerType.WEB) {
prevPlaybackTypeRef.current = playbackType;
wasPlayingRef.current = isPlaying;
return;
}
if (isFirstWebMountRef.current) {
isFirstWebMountRef.current = false;
wasPlayingRef.current = isPlaying;
prevPlaybackTypeRef.current = playbackType;
return;
}
const wasPlaying = wasPlayingRef.current;
wasPlayingRef.current = isPlaying;
if (isPlaying && (!wasPlaying || prevType !== PlayerType.WEB)) {
setWebInitGeneration((g) => g + 1);
}
prevPlaybackTypeRef.current = playbackType;
}, [playbackType, isPlaying]);
const cleanupVisualizer = () => { const cleanupVisualizer = () => {
if (animationFrameRef.current) { if (animationFrameRef.current) {
cancelAnimationFrame(animationFrameRef.current); cancelAnimationFrame(animationFrameRef.current);
@@ -118,17 +151,20 @@ const VisualizerInner = () => {
// Initialize butterchurn instance // Initialize butterchurn instance
useEffect(() => { useEffect(() => {
const { context, gains } = webAudio || {}; const { context } = webAudio || {};
const inputNodes = getVisualizerAudioNodes(webAudio, playbackType);
const canvas = canvasRef.current; const canvas = canvasRef.current;
const container = containerRef.current; const container = containerRef.current;
const shouldRunForWebPlayback = playbackType === PlayerType.WEB && isPlaying;
const shouldRunForMpvLoopback = playbackType === PlayerType.LOCAL && inputNodes.length > 0;
const needsInitialization = const needsInitialization =
context && context &&
gains && inputNodes.length > 0 &&
gains.length > 0 &&
canvas && canvas &&
container && container &&
isPlaying && (shouldRunForWebPlayback || shouldRunForMpvLoopback) &&
librariesLoaded && librariesLoaded &&
(!isInitializedRef.current || !visualizerRef.current); (!isInitializedRef.current || !visualizerRef.current);
@@ -159,7 +195,8 @@ const VisualizerInner = () => {
} }
async function initializeVisualizer(width: number, height: number) { async function initializeVisualizer(width: number, height: number) {
if (!gains || gains.length === 0 || !canvas || !context || !librariesLoaded) return; const nodes = getVisualizerAudioNodes(webAudio, playbackType);
if (!nodes.length || !canvas || !context || !librariesLoaded) return;
canvas.width = width; canvas.width = width;
canvas.height = height; canvas.height = height;
@@ -173,8 +210,8 @@ const VisualizerInner = () => {
width, width,
}) as ButterchurnVisualizer; }) as ButterchurnVisualizer;
for (const gain of gains) { for (const node of nodes) {
butterchurnInstance.connectAudio(gain); butterchurnInstance.connectAudio(node);
} }
visualizerRef.current = butterchurnInstance; visualizerRef.current = butterchurnInstance;
@@ -192,10 +229,18 @@ const VisualizerInner = () => {
cleanupVisualizer(); cleanupVisualizer();
}; };
// eslint-disable-next-line react-hooks/exhaustive-deps // eslint-disable-next-line react-hooks/exhaustive-deps
}, [webAudio, isPlaying, librariesLoaded]); }, [webAudio, playbackType, librariesLoaded, webInitGeneration]);
// Kill visualizer after 5 seconds of pause // Kill visualizer after 5 seconds of pause
useEffect(() => { useEffect(() => {
if (playbackType === PlayerType.LOCAL) {
if (pauseTimerRef.current) {
clearTimeout(pauseTimerRef.current);
pauseTimerRef.current = undefined;
}
return;
}
if (isPlaying) { if (isPlaying) {
// Clear pause timer if player resumes // Clear pause timer if player resumes
if (pauseTimerRef.current) { if (pauseTimerRef.current) {
@@ -220,7 +265,7 @@ const VisualizerInner = () => {
pauseTimerRef.current = undefined; pauseTimerRef.current = undefined;
} }
}; };
}, [isPlaying]); }, [isPlaying, playbackType]);
// Handle resize // Handle resize
useEffect(() => { useEffect(() => {
@@ -460,7 +505,7 @@ const VisualizerInner = () => {
}; };
}, [isVisualizerReady, librariesLoaded, butterchurnSettings.blendTime]); }, [isVisualizerReady, librariesLoaded, butterchurnSettings.blendTime]);
const shouldRenderContainer = isPlaying || isVisualizerReady; const shouldRenderContainer = isPlaying || isVisualizerReady || !!webAudio;
if (!shouldRenderContainer) { if (!shouldRenderContainer) {
return null; return null;
@@ -0,0 +1,6 @@
import { useVisualizerSystemAudio } from '/@/renderer/features/player/hooks/use-visualizer-system-audio';
// Renders nothing; exists solely to run the system-audio capture hook from
// inside the player provider tree so it can read the shared WebAudio context.
export function VisualizerSystemAudioBridge() {
    useVisualizerSystemAudio();
    return null;
}
+1
View File
@@ -288,4 +288,5 @@ export interface UniqueId {
export type WebAudio = { export type WebAudio = {
context: AudioContext; context: AudioContext;
gains: GainNode[]; gains: GainNode[];
visualizerInputs?: AudioNode[];
}; };