add system audio loopback for webaudio

This commit is contained in:
jeffvli
2026-04-05 00:48:38 -07:00
parent 25bb7f7069
commit 94886a2d5a
12 changed files with 257 additions and 28 deletions
@@ -15,7 +15,7 @@ import {
} from '/@/renderer/store/full-screen-player.store';
import { Button } from '/@/shared/components/button/button';
import { Group } from '/@/shared/components/group/group';
import { ItemListKey, PlayerType } from '/@/shared/types/types';
import { ItemListKey } from '/@/shared/types/types';
const AudioMotionAnalyzerVisualizer = lazy(() =>
import('../../visualizer/components/audiomotionanalyzer/visualizer').then((module) => ({
@@ -33,7 +33,7 @@ export const FullScreenPlayerQueue = () => {
const { t } = useTranslation();
const { activeTab, opacity } = useFullScreenPlayerStore();
const { setStore } = useFullScreenPlayerStoreActions();
const { type, webAudio } = usePlaybackSettings();
const { webAudio } = usePlaybackSettings();
const visualizerType = useSettingsStore((store) => store.visualizer.type);
const headerItems = useMemo(() => {
@@ -55,7 +55,7 @@ export const FullScreenPlayerQueue = () => {
},
];
if (type === PlayerType.WEB && webAudio) {
if (webAudio) {
items.push({
active: activeTab === 'visualizer',
label: t('page.fullscreenPlayer.visualizer', { postProcess: 'titleCase' }),
@@ -64,7 +64,7 @@ export const FullScreenPlayerQueue = () => {
}
return items;
}, [activeTab, setStore, t, type, webAudio]);
}, [activeTab, setStore, t, webAudio]);
return (
<div
@@ -119,7 +119,7 @@ export const FullScreenPlayerQueue = () => {
</div>
) : activeTab === 'lyrics' ? (
<Lyrics fadeOutNoLyricsMessage={false} />
) : activeTab === 'visualizer' && type === PlayerType.WEB && webAudio ? (
) : activeTab === 'visualizer' && webAudio ? (
<Suspense fallback={<></>}>
{visualizerType === 'butterchurn' ? (
<ButterchurnVisualizer />
@@ -13,7 +13,7 @@ import {
useWindowSettings,
} from '/@/renderer/store/settings.store';
import { useHotkeys } from '/@/shared/hooks/use-hotkeys';
import { Platform, PlayerType } from '/@/shared/types/types';
import { Platform } from '/@/shared/types/types';
const AudioMotionAnalyzerVisualizer = lazy(() =>
import('../../visualizer/components/audiomotionanalyzer/visualizer').then((module) => ({
@@ -131,7 +131,7 @@ VisualizerContainer.displayName = 'VisualizerContainer';
export const FullScreenVisualizer = () => {
const { setStore } = useFullScreenPlayerStoreActions();
const { windowBarStyle } = useWindowSettings();
const { type, webAudio } = usePlaybackSettings();
const { webAudio } = usePlaybackSettings();
const visualizerType = useSettingsStore((store) => store.visualizer.type);
const isMobile = useIsMobile();
@@ -155,7 +155,7 @@ export const FullScreenVisualizer = () => {
return (
<VisualizerContainer isMobile={isMobile} windowBarStyle={windowBarStyle}>
<div className={styles.visualizerContainer}>
{type === PlayerType.WEB && webAudio ? (
{webAudio ? (
<Suspense fallback={<></>}>
{visualizerType === 'butterchurn' ? (
<ButterchurnVisualizer />
@@ -0,0 +1,138 @@
import isElectron from 'is-electron';
import { useCallback, useEffect, useRef } from 'react';
import i18n from '/@/i18n/i18n';
import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio';
import { usePlaybackType } from '/@/renderer/store/settings.store';
import { toast } from '/@/shared/components/toast/toast';
import { PlayerType } from '/@/shared/types/types';
/**
 * Hook that captures system audio via the display-media loopback
 * (Electron only) and publishes it to the shared web-audio state as
 * `visualizerInputs`, so the visualizer can render audio that is played
 * by the local (non-web) player.
 *
 * Lifecycle:
 * - Auto-connects when playback type is LOCAL and no input is wired yet.
 * - Disconnects (stops tracks, detaches the source node, clears
 *   `visualizerInputs`) when playback switches to the WEB player.
 */
export function useVisualizerSystemAudio() {
    const playbackType = usePlaybackType();
    const { setWebAudio, webAudio } = useWebAudio();
    // Mirror of the latest webAudio value so stable callbacks and async code
    // can read the current state without being re-created on every change.
    const webAudioRef = useRef(webAudio);
    // Active loopback capture stream, if any.
    const streamRef = useRef<MediaStream | null>(null);
    // Source node created from the capture stream.
    const sourceRef = useRef<MediaStreamAudioSourceNode | null>(null);
    // Prevents the auto-connect effect from starting a second capture while
    // one is still being set up (getDisplayMedia is async).
    const connectInFlightRef = useRef(false);
    useEffect(() => {
        webAudioRef.current = webAudio;
    }, [webAudio]);
    // Tears down the capture: stops all tracks, disconnects the source node,
    // and removes visualizerInputs from the shared web-audio state.
    const disconnect = useCallback(() => {
        if (streamRef.current) {
            streamRef.current.getTracks().forEach((t) => t.stop());
            streamRef.current = null;
        }
        if (sourceRef.current) {
            try {
                sourceRef.current.disconnect();
            } catch {
                // ignore — node may already be detached
            }
            sourceRef.current = null;
        }
        const w = webAudioRef.current;
        if (w?.visualizerInputs?.length && setWebAudio) {
            // Clear inputs and keep the ref mirror in sync immediately so
            // callers reading webAudioRef before the next render see it.
            const next = { ...w, visualizerInputs: undefined };
            setWebAudio(next);
            webAudioRef.current = next;
        }
    }, [setWebAudio]);
    // The web player feeds the visualizer directly, so loopback capture is
    // only needed (and kept) for non-web playback.
    useEffect(() => {
        if (playbackType === PlayerType.WEB) {
            disconnect();
        }
    }, [playbackType, disconnect]);
    // Starts a loopback capture and wires it into the web-audio state.
    // Re-validates the audio context after every await, since the context
    // may be replaced or closed while the permission prompt is open.
    const connect = useCallback(async () => {
        if (!isElectron()) {
            return;
        }
        const w = webAudioRef.current;
        if (!w?.context || w.context.state === 'closed') {
            return;
        }
        if (!setWebAudio) return;
        // Drop any previous capture before starting a new one.
        disconnect();
        const wAfterDisconnect = webAudioRef.current;
        if (!wAfterDisconnect?.context || wAfterDisconnect.context.state === 'closed') {
            return;
        }
        connectInFlightRef.current = true;
        try {
            // video:true is requested alongside audio — NOTE(review): this
            // looks required for the display-media picker to succeed; the
            // video track is never used. Confirm against Electron's
            // setDisplayMediaRequestHandler setup in the main process.
            const stream = await navigator.mediaDevices.getDisplayMedia({
                audio: true,
                video: true,
            });
            const audioTracks = stream.getAudioTracks();
            if (audioTracks.length === 0) {
                // Capture succeeded but carries no audio — clean up and warn.
                stream.getTracks().forEach((t) => t.stop());
                toast.error({ message: i18n.t('visualizer.systemAudioNoAudioTrack') });
                return;
            }
            // Context may have changed while awaiting the user's selection.
            const latest = webAudioRef.current;
            if (!latest?.context || latest.context.state === 'closed') {
                stream.getTracks().forEach((t) => t.stop());
                return;
            }
            try {
                // Best-effort: a suspended context produces no analyser data.
                await latest.context.resume();
            } catch {
                // ignore
            }
            const source = latest.context.createMediaStreamSource(stream);
            streamRef.current = stream;
            sourceRef.current = source;
            const next = { ...latest, visualizerInputs: [source] };
            setWebAudio(next);
            webAudioRef.current = next;
        } catch (e) {
            const name = (e as DOMException)?.name;
            // User cancelled the picker — not an error worth surfacing.
            if (name === 'NotAllowedError' || name === 'AbortError') {
                return;
            }
            toast.error({
                message: i18n.t('visualizer.systemAudioCaptureFailed', {
                    message: (e as Error).message,
                }),
            });
        } finally {
            connectInFlightRef.current = false;
        }
    }, [disconnect, setWebAudio]);
    // Keep a ref to the latest connect so the auto-connect effect below does
    // not need `connect` in its dependency list (which would re-run it on
    // every setWebAudio identity change).
    const connectRef = useRef(connect);
    connectRef.current = connect;
    // Auto-connect: when the local player is active in Electron, the audio
    // context is live, nothing is wired yet, and no attempt is in flight.
    useEffect(() => {
        if (playbackType !== PlayerType.LOCAL || !isElectron()) {
            return;
        }
        const w = webAudioRef.current;
        if (!w?.context || w.context.state === 'closed') {
            return;
        }
        if (w.visualizerInputs?.length) {
            return;
        }
        if (connectInFlightRef.current) {
            return;
        }
        void connectRef.current();
    }, [playbackType, webAudio?.context, webAudio?.visualizerInputs?.length]);
}
@@ -0,0 +1,14 @@
import type { WebAudio } from '/@/shared/types/types';
import { PlayerType } from '/@/shared/types/types';
/**
 * Resolves which audio nodes should feed the visualizer for the active
 * player type.
 *
 * @param webAudio - Shared web-audio state; may be undefined before setup.
 * @param playbackType - The currently selected player backend.
 * @returns For the local player, the system-audio loopback inputs (empty
 *          array when none are connected); otherwise the web player's gain
 *          nodes. Empty array when web audio is not initialized.
 */
export function getVisualizerAudioNodes(
    webAudio: undefined | WebAudio,
    playbackType: PlayerType,
): AudioNode[] {
    if (!webAudio) {
        return [];
    }
    return playbackType === PlayerType.LOCAL
        ? (webAudio.visualizerInputs ?? [])
        : webAudio.gains;
}