https://github.com/jeffvli/feishin
Commit: add system audio loopback for webaudio
@@ -1214,6 +1214,8 @@
         "mainText": "drop a file here"
     },
     "visualizer": {
+        "systemAudioCaptureFailed": "Could not start capture: {{message}}",
+        "systemAudioNoAudioTrack": "No audio track was returned. Ensure audio capture is enabled when prompted.",
         "visualizerType": "Visualizer Type",
         "cyclePresets": "Cycle Presets",
         "cycleTime": "Cycle Time (seconds)",
@@ -5,6 +5,7 @@ import {
     app,
     BrowserWindow,
     BrowserWindowConstructorOptions,
+    desktopCapturer,
     globalShortcut,
     ipcMain,
     Menu,
@@ -732,6 +733,22 @@ async function createWindow(first = true): Promise<void> {
         return { action: 'deny' };
     });

+    mainWindow.webContents.session.setDisplayMediaRequestHandler((_request, callback) => {
+        desktopCapturer
+            .getSources({ types: ['screen'] })
+            .then((sources) => {
+                if (sources.length > 0) {
+                    callback({ audio: 'loopback', video: sources[0] });
+                } else {
+                    callback({});
+                }
+            })
+            .catch((err) => {
+                log.warn('desktopCapturer.getSources failed', err);
+                callback({});
+            });
+    });
+
     if (!disableAutoUpdates() && store.get('disable_auto_updates') !== true) {
         new AppUpdater();
     }
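
Note on the block above: session.setDisplayMediaRequestHandler lets the main process answer navigator.mediaDevices.getDisplayMedia() calls coming from the renderer, and returning audio: 'loopback' asks Chromium to capture the system's audio output rather than a microphone (platform support and the minimum Electron version are not stated in this diff). A minimal renderer-side sketch of the call this handler services, matching the hook added later in the commit (the function name here is hypothetical):

    // Renderer process (sketch). What the resulting stream contains is decided by the
    // main-process handler above: here, one screen video track plus loopback system audio.
    async function requestSystemAudioStream(): Promise<MediaStream> {
        // video: true is requested alongside audio because audio-only getDisplayMedia
        // requests are rejected by Chromium (assumption based on the spec); the video
        // track is not used by the visualizer wiring.
        return navigator.mediaDevices.getDisplayMedia({ audio: true, video: true });
    }
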
@@ -11,6 +11,7 @@ import { lazy, memo, Suspense, useEffect, useMemo, useRef, useState } from 'react';

 import i18n from '/@/i18n/i18n';
 import { WebAudioContext } from '/@/renderer/features/player/context/webaudio-context';
+import { VisualizerSystemAudioBridge } from '/@/renderer/features/visualizer/components/visualizer-system-audio-bridge';
 import { useCheckForUpdates } from '/@/renderer/hooks/use-check-for-updates';
 import { useNativeMenuSync } from '/@/renderer/hooks/use-native-menu-sync';
 import { useSyncSettingsToMain } from '/@/renderer/hooks/use-sync-settings-to-main';
@@ -79,6 +80,7 @@ const AppShell = memo(function AppShell() {
             <WebAudioContext.Provider value={webAudioProvider}>
                 <PlayerProvider>
                     <AudioPlayers />
+                    <VisualizerSystemAudioBridge />
                     <AppRouter />
                 </PlayerProvider>
             </WebAudioContext.Provider>
@@ -27,7 +27,7 @@ import {
 import { ActionIcon, ActionIconGroup } from '/@/shared/components/action-icon/action-icon';
 import { Flex } from '/@/shared/components/flex/flex';
 import { Stack } from '/@/shared/components/stack/stack';
-import { ItemListKey, Platform, PlayerType } from '/@/shared/types/types';
+import { ItemListKey, Platform } from '/@/shared/types/types';

 type SidebarPanelType = 'lyrics' | 'queue' | 'visualizer';

@@ -55,9 +55,9 @@ export const SidebarPlayQueue = () => {
     const showLyricsInSidebar = useShowLyricsInSidebar();
     const showVisualizerInSidebar = useShowVisualizerInSidebar();
     const sidebarPanelOrder = useSidebarPanelOrder();
-    const { type, webAudio } = usePlaybackSettings();
+    const { webAudio } = usePlaybackSettings();
     const { windowBarStyle } = useWindowSettings();
-    const showVisualizer = showVisualizerInSidebar && type === PlayerType.WEB && webAudio;
+    const showVisualizer = showVisualizerInSidebar && webAudio;
     const showPanel = showLyricsInSidebar || showVisualizer;

     const shouldAddTopMargin = isElectron() && windowBarStyle === Platform.WEB;
@@ -374,8 +374,8 @@ const CombinedLyricsAndVisualizerPanel = () => {
     const visualizerType = useSettingsStore((store) => store.visualizer.type);
     const showLyricsInSidebar = useShowLyricsInSidebar();
     const showVisualizerInSidebar = useShowVisualizerInSidebar();
-    const { type, webAudio } = usePlaybackSettings();
-    const showVisualizer = showVisualizerInSidebar && type === PlayerType.WEB && webAudio;
+    const { webAudio } = usePlaybackSettings();
+    const showVisualizer = showVisualizerInSidebar && webAudio;

     const { data: lyricsData } = useQuery(
         lyricsQueries.songLyrics(
@@ -15,7 +15,7 @@ import {
 } from '/@/renderer/store/full-screen-player.store';
 import { Button } from '/@/shared/components/button/button';
 import { Group } from '/@/shared/components/group/group';
-import { ItemListKey, PlayerType } from '/@/shared/types/types';
+import { ItemListKey } from '/@/shared/types/types';

 const AudioMotionAnalyzerVisualizer = lazy(() =>
     import('../../visualizer/components/audiomotionanalyzer/visualizer').then((module) => ({
@@ -33,7 +33,7 @@ export const FullScreenPlayerQueue = () => {
     const { t } = useTranslation();
     const { activeTab, opacity } = useFullScreenPlayerStore();
     const { setStore } = useFullScreenPlayerStoreActions();
-    const { type, webAudio } = usePlaybackSettings();
+    const { webAudio } = usePlaybackSettings();
     const visualizerType = useSettingsStore((store) => store.visualizer.type);

     const headerItems = useMemo(() => {
@@ -55,7 +55,7 @@ export const FullScreenPlayerQueue = () => {
             },
         ];

-        if (type === PlayerType.WEB && webAudio) {
+        if (webAudio) {
             items.push({
                 active: activeTab === 'visualizer',
                 label: t('page.fullscreenPlayer.visualizer', { postProcess: 'titleCase' }),
@@ -64,7 +64,7 @@ export const FullScreenPlayerQueue = () => {
         }

         return items;
-    }, [activeTab, setStore, t, type, webAudio]);
+    }, [activeTab, setStore, t, webAudio]);

     return (
         <div
@@ -119,7 +119,7 @@ export const FullScreenPlayerQueue = () => {
                 </div>
             ) : activeTab === 'lyrics' ? (
                 <Lyrics fadeOutNoLyricsMessage={false} />
-            ) : activeTab === 'visualizer' && type === PlayerType.WEB && webAudio ? (
+            ) : activeTab === 'visualizer' && webAudio ? (
                 <Suspense fallback={<></>}>
                     {visualizerType === 'butterchurn' ? (
                         <ButterchurnVisualizer />
@@ -13,7 +13,7 @@ import {
     useWindowSettings,
 } from '/@/renderer/store/settings.store';
 import { useHotkeys } from '/@/shared/hooks/use-hotkeys';
-import { Platform, PlayerType } from '/@/shared/types/types';
+import { Platform } from '/@/shared/types/types';

 const AudioMotionAnalyzerVisualizer = lazy(() =>
     import('../../visualizer/components/audiomotionanalyzer/visualizer').then((module) => ({
@@ -131,7 +131,7 @@ VisualizerContainer.displayName = 'VisualizerContainer';
 export const FullScreenVisualizer = () => {
     const { setStore } = useFullScreenPlayerStoreActions();
     const { windowBarStyle } = useWindowSettings();
-    const { type, webAudio } = usePlaybackSettings();
+    const { webAudio } = usePlaybackSettings();
     const visualizerType = useSettingsStore((store) => store.visualizer.type);
     const isMobile = useIsMobile();

@@ -155,7 +155,7 @@ export const FullScreenVisualizer = () => {
     return (
         <VisualizerContainer isMobile={isMobile} windowBarStyle={windowBarStyle}>
             <div className={styles.visualizerContainer}>
-                {type === PlayerType.WEB && webAudio ? (
+                {webAudio ? (
                     <Suspense fallback={<></>}>
                         {visualizerType === 'butterchurn' ? (
                             <ButterchurnVisualizer />
@@ -0,0 +1,138 @@
+import isElectron from 'is-electron';
+import { useCallback, useEffect, useRef } from 'react';
+
+import i18n from '/@/i18n/i18n';
+import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio';
+import { usePlaybackType } from '/@/renderer/store/settings.store';
+import { toast } from '/@/shared/components/toast/toast';
+import { PlayerType } from '/@/shared/types/types';
+
+export function useVisualizerSystemAudio() {
+    const playbackType = usePlaybackType();
+    const { setWebAudio, webAudio } = useWebAudio();
+    const webAudioRef = useRef(webAudio);
+    const streamRef = useRef<MediaStream | null>(null);
+    const sourceRef = useRef<MediaStreamAudioSourceNode | null>(null);
+    const connectInFlightRef = useRef(false);
+
+    useEffect(() => {
+        webAudioRef.current = webAudio;
+    }, [webAudio]);
+
+    const disconnect = useCallback(() => {
+        if (streamRef.current) {
+            streamRef.current.getTracks().forEach((t) => t.stop());
+            streamRef.current = null;
+        }
+        if (sourceRef.current) {
+            try {
+                sourceRef.current.disconnect();
+            } catch {
+                // ignore
+            }
+            sourceRef.current = null;
+        }
+        const w = webAudioRef.current;
+        if (w?.visualizerInputs?.length && setWebAudio) {
+            const next = { ...w, visualizerInputs: undefined };
+            setWebAudio(next);
+            webAudioRef.current = next;
+        }
+    }, [setWebAudio]);
+
+    useEffect(() => {
+        if (playbackType === PlayerType.WEB) {
+            disconnect();
+        }
+    }, [playbackType, disconnect]);
+
+    const connect = useCallback(async () => {
+        if (!isElectron()) {
+            return;
+        }
+
+        const w = webAudioRef.current;
+        if (!w?.context || w.context.state === 'closed') {
+            return;
+        }
+
+        if (!setWebAudio) return;
+
+        disconnect();
+
+        const wAfterDisconnect = webAudioRef.current;
+        if (!wAfterDisconnect?.context || wAfterDisconnect.context.state === 'closed') {
+            return;
+        }
+
+        connectInFlightRef.current = true;
+
+        try {
+            const stream = await navigator.mediaDevices.getDisplayMedia({
+                audio: true,
+                video: true,
+            });
+
+            const audioTracks = stream.getAudioTracks();
+            if (audioTracks.length === 0) {
+                stream.getTracks().forEach((t) => t.stop());
+                toast.error({ message: i18n.t('visualizer.systemAudioNoAudioTrack') });
+                return;
+            }
+
+            const latest = webAudioRef.current;
+            if (!latest?.context || latest.context.state === 'closed') {
+                stream.getTracks().forEach((t) => t.stop());
+                return;
+            }
+
+            try {
+                await latest.context.resume();
+            } catch {
+                // ignore
+            }
+
+            const source = latest.context.createMediaStreamSource(stream);
+            streamRef.current = stream;
+            sourceRef.current = source;
+
+            const next = { ...latest, visualizerInputs: [source] };
+            setWebAudio(next);
+            webAudioRef.current = next;
+        } catch (e) {
+            const name = (e as DOMException)?.name;
+            if (name === 'NotAllowedError' || name === 'AbortError') {
+                return;
+            }
+            toast.error({
+                message: i18n.t('visualizer.systemAudioCaptureFailed', {
+                    message: (e as Error).message,
+                }),
+            });
+        } finally {
+            connectInFlightRef.current = false;
+        }
+    }, [disconnect, setWebAudio]);
+
+    const connectRef = useRef(connect);
+    connectRef.current = connect;
+
+    useEffect(() => {
+        if (playbackType !== PlayerType.LOCAL || !isElectron()) {
+            return;
+        }
+
+        const w = webAudioRef.current;
+        if (!w?.context || w.context.state === 'closed') {
+            return;
+        }
+        if (w.visualizerInputs?.length) {
+            return;
+        }
+        if (connectInFlightRef.current) {
+            return;
+        }
+
+        void connectRef.current();
+    }, [playbackType, webAudio?.context, webAudio?.visualizerInputs?.length]);
+}
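
The hook above is the renderer half of the loopback path: while local (MPV) playback is active, it requests a display-media stream (answered by the main-process handler earlier in this commit), wraps the captured system audio in a MediaStreamAudioSourceNode, and publishes it as webAudio.visualizerInputs for the visualizers to read. A stripped-down sketch of that wiring, with a hypothetical helper name and no React state handling:

    // Sketch only: illustrates the capture -> Web Audio wiring outside of React.
    // `attachLoopbackTo` is a hypothetical helper, not part of this commit.
    async function attachLoopbackTo(context: AudioContext): Promise<AudioNode | null> {
        // The main-process setDisplayMediaRequestHandler decides what this returns;
        // in this commit it is a screen video track plus 'loopback' system audio.
        const stream = await navigator.mediaDevices.getDisplayMedia({ audio: true, video: true });
        if (stream.getAudioTracks().length === 0) {
            stream.getTracks().forEach((t) => t.stop());
            return null;
        }
        // createMediaStreamSource does not connect the node anywhere on its own;
        // the caller decides what analyser (or other node) to feed.
        return context.createMediaStreamSource(stream);
    }
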
@@ -0,0 +1,14 @@
+import type { WebAudio } from '/@/shared/types/types';
+
+import { PlayerType } from '/@/shared/types/types';
+
+export function getVisualizerAudioNodes(
+    webAudio: undefined | WebAudio,
+    playbackType: PlayerType,
+): AudioNode[] {
+    if (!webAudio) return [];
+    if (playbackType === PlayerType.LOCAL) {
+        return webAudio.visualizerInputs ?? [];
+    }
+    return webAudio.gains;
+}
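
getVisualizerAudioNodes is the selection policy behind the component changes that follow: for local (MPV) playback the only usable signal is the captured loopback source stored in visualizerInputs, while for web playback the player's existing GainNodes are used directly. A hedged usage sketch, assuming the WebAudio type and helper above are imported; the AnalyserNode is purely illustrative, not how the bundled visualizers consume the nodes:

    // Sketch: feed whatever nodes the helper returns into a plain AnalyserNode.
    function wireAnalyser(webAudio: WebAudio | undefined, playbackType: PlayerType): AnalyserNode | null {
        const nodes = getVisualizerAudioNodes(webAudio, playbackType);
        if (!webAudio || nodes.length === 0) return null;
        const analyser = webAudio.context.createAnalyser();
        nodes.forEach((node) => node.connect(analyser));
        return analyser;
    }
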
@@ -3,9 +3,10 @@ import { createRef, useCallback, useEffect, useMemo, useRef, useState } from 'react';
 import styles from './visualizer.module.css';

 import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio';
+import { getVisualizerAudioNodes } from '/@/renderer/features/player/utils/get-visualizer-audio-nodes';
 import { openVisualizerSettingsModal } from '/@/renderer/features/player/utils/open-visualizer-settings-modal';
 import { ComponentErrorBoundary } from '/@/renderer/features/shared/components/component-error-boundary';
-import { useAccent, useSettingsStore } from '/@/renderer/store';
+import { useAccent, usePlaybackType, useSettingsStore } from '/@/renderer/store';
 import {
     useFullScreenPlayerStore,
     useFullScreenPlayerStoreActions,
@@ -18,6 +19,7 @@ const VisualizerInner = () => {
     const canvasRef = createRef<HTMLDivElement>();
     const accent = useAccent();
     const visualizer = useSettingsStore((store) => store.visualizer);
+    const playbackType = usePlaybackType();
     const opacity = useSettingsStore((store) => store.visualizer.audiomotionanalyzer.opacity);
     const [motion, setMotion] = useState<any>();
     const [libraryLoaded, setLibraryLoaded] = useState(false);
@@ -214,9 +216,10 @@ const VisualizerInner = () => {
     );

     useEffect(() => {
-        const { context, gains } = webAudio || {};
+        const { context } = webAudio || {};
+        const inputNodes = getVisualizerAudioNodes(webAudio, playbackType);
         let audioMotion: any | undefined;
-        if (gains && context && canvasRef.current && !motion && libraryLoaded) {
+        if (inputNodes.length > 0 && context && canvasRef.current && !motion && libraryLoaded) {
             const AudioMotionAnalyzer = AudioMotionAnalyzerRef.current;
             if (!AudioMotionAnalyzer) return;

@@ -249,7 +252,7 @@ const VisualizerInner = () => {
             registerCustomGradients(audioMotion);

             setMotion(audioMotion);
-            for (const gain of gains) audioMotion.connectInput(gain);
+            for (const node of inputNodes) audioMotion.connectInput(node);
         }

         return () => {
@@ -262,6 +265,7 @@ const VisualizerInner = () => {
         accent,
         canvasRef,
         registerCustomGradients,
+        playbackType,
         webAudio,
         visualizer,
         options,
@@ -3,11 +3,13 @@ import { createRef, useCallback, useEffect, useRef, useState } from 'react';
 import styles from './visualizer.module.css';

 import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio';
+import { getVisualizerAudioNodes } from '/@/renderer/features/player/utils/get-visualizer-audio-nodes';
 import { openVisualizerSettingsModal } from '/@/renderer/features/player/utils/open-visualizer-settings-modal';
 import { ComponentErrorBoundary } from '/@/renderer/features/shared/components/component-error-boundary';
 import {
     subscribeButterchurnPreset,
     useButterchurnSettings,
+    usePlaybackType,
     useSettingsStore,
     useSettingsStoreActions,
 } from '/@/renderer/store';
@@ -19,7 +21,7 @@ import { usePlayerStatus } from '/@/renderer/store/player.store';
 import { ActionIcon } from '/@/shared/components/action-icon/action-icon';
 import { Group } from '/@/shared/components/group/group';
 import { Text } from '/@/shared/components/text/text';
-import { PlayerStatus } from '/@/shared/types/types';
+import { PlayerStatus, PlayerType } from '/@/shared/types/types';

 // Ignore presets that are erroring out
 const IGNORED_PRESETS = ['Flexi + Martin - astral projection'];
@@ -56,9 +58,14 @@ const VisualizerInner = () => {
     const initialPresetLoadedRef = useRef(false);
     const butterchurnSettings = useButterchurnSettings();
     const opacity = useSettingsStore((store) => store.visualizer.butterchurn.opacity);
+    const playbackType = usePlaybackType();
     const { setSettings } = useSettingsStoreActions();
     const playerStatus = usePlayerStatus();
     const isPlaying = playerStatus === PlayerStatus.PLAYING;
+    const [webInitGeneration, setWebInitGeneration] = useState(0);
+    const wasPlayingRef = useRef(false);
+    const isFirstWebMountRef = useRef(true);
+    const prevPlaybackTypeRef = useRef(playbackType);

     useEffect(() => {
         let isMounted = true;
@@ -89,6 +96,32 @@ const VisualizerInner = () => {
         };
     }, []);

+    useEffect(() => {
+        const prevType = prevPlaybackTypeRef.current;
+
+        if (playbackType !== PlayerType.WEB) {
+            prevPlaybackTypeRef.current = playbackType;
+            wasPlayingRef.current = isPlaying;
+            return;
+        }
+
+        if (isFirstWebMountRef.current) {
+            isFirstWebMountRef.current = false;
+            wasPlayingRef.current = isPlaying;
+            prevPlaybackTypeRef.current = playbackType;
+            return;
+        }
+
+        const wasPlaying = wasPlayingRef.current;
+        wasPlayingRef.current = isPlaying;
+
+        if (isPlaying && (!wasPlaying || prevType !== PlayerType.WEB)) {
+            setWebInitGeneration((g) => g + 1);
+        }
+
+        prevPlaybackTypeRef.current = playbackType;
+    }, [playbackType, isPlaying]);
+
     const cleanupVisualizer = () => {
         if (animationFrameRef.current) {
             cancelAnimationFrame(animationFrameRef.current);
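
The effect added above exists because the butterchurn init effect (changed further down) no longer lists isPlaying among its dependencies; instead, webInitGeneration is bumped whenever web playback (re)starts or playback switches back to the web player, which forces the init effect to run again and reconnect to the current audio nodes. A generic sketch of this generation-counter pattern, with hypothetical names and not taken from this commit:

    import { useEffect, useState } from 'react';

    // Hypothetical, minimal illustration of the pattern: bumping `generation`
    // re-runs the effect even when its other inputs are referentially unchanged.
    export function useForcedReinit(build: () => void | (() => void)) {
        const [generation, setGeneration] = useState(0);

        useEffect(() => {
            return build();
            // eslint-disable-next-line react-hooks/exhaustive-deps
        }, [generation]);

        // Call the returned function to force the effect to run again.
        return () => setGeneration((g) => g + 1);
    }
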
@@ -118,17 +151,20 @@ const VisualizerInner = () => {

     // Initialize butterchurn instance
     useEffect(() => {
-        const { context, gains } = webAudio || {};
+        const { context } = webAudio || {};
+        const inputNodes = getVisualizerAudioNodes(webAudio, playbackType);
         const canvas = canvasRef.current;
         const container = containerRef.current;

+        const shouldRunForWebPlayback = playbackType === PlayerType.WEB && isPlaying;
+        const shouldRunForMpvLoopback = playbackType === PlayerType.LOCAL && inputNodes.length > 0;
+
         const needsInitialization =
             context &&
-            gains &&
-            gains.length > 0 &&
+            inputNodes.length > 0 &&
             canvas &&
             container &&
-            isPlaying &&
+            (shouldRunForWebPlayback || shouldRunForMpvLoopback) &&
             librariesLoaded &&
             (!isInitializedRef.current || !visualizerRef.current);

@@ -159,7 +195,8 @@ const VisualizerInner = () => {
         }

         async function initializeVisualizer(width: number, height: number) {
-            if (!gains || gains.length === 0 || !canvas || !context || !librariesLoaded) return;
+            const nodes = getVisualizerAudioNodes(webAudio, playbackType);
+            if (!nodes.length || !canvas || !context || !librariesLoaded) return;

             canvas.width = width;
             canvas.height = height;
@@ -173,8 +210,8 @@ const VisualizerInner = () => {
                 width,
             }) as ButterchurnVisualizer;

-            for (const gain of gains) {
-                butterchurnInstance.connectAudio(gain);
+            for (const node of nodes) {
+                butterchurnInstance.connectAudio(node);
             }

             visualizerRef.current = butterchurnInstance;
@@ -192,10 +229,18 @@ const VisualizerInner = () => {
             cleanupVisualizer();
         };
         // eslint-disable-next-line react-hooks/exhaustive-deps
-    }, [webAudio, isPlaying, librariesLoaded]);
+    }, [webAudio, playbackType, librariesLoaded, webInitGeneration]);

     // Kill visualizer after 5 seconds of pause
     useEffect(() => {
+        if (playbackType === PlayerType.LOCAL) {
+            if (pauseTimerRef.current) {
+                clearTimeout(pauseTimerRef.current);
+                pauseTimerRef.current = undefined;
+            }
+            return;
+        }
+
         if (isPlaying) {
             // Clear pause timer if player resumes
             if (pauseTimerRef.current) {
@@ -220,7 +265,7 @@ const VisualizerInner = () => {
                 pauseTimerRef.current = undefined;
             }
         };
-    }, [isPlaying]);
+    }, [isPlaying, playbackType]);

     // Handle resize
     useEffect(() => {
@@ -460,7 +505,7 @@ const VisualizerInner = () => {
         };
     }, [isVisualizerReady, librariesLoaded, butterchurnSettings.blendTime]);

-    const shouldRenderContainer = isPlaying || isVisualizerReady;
+    const shouldRenderContainer = isPlaying || isVisualizerReady || !!webAudio;

     if (!shouldRenderContainer) {
         return null;
@@ -0,0 +1,6 @@
+import { useVisualizerSystemAudio } from '/@/renderer/features/player/hooks/use-visualizer-system-audio';
+
+export function VisualizerSystemAudioBridge() {
+    useVisualizerSystemAudio();
+    return null;
+}
@@ -288,4 +288,5 @@ export interface UniqueId {
 export type WebAudio = {
     context: AudioContext;
     gains: GainNode[];
+    visualizerInputs?: AudioNode[];
 };