Add visualizer configuration (#1443)

* add visualizer configuration

* add visualizer presets

* add butterchurn visualizer

* wrap visualizers in error boundary
This commit is contained in:
Jeff
2025-12-24 18:12:13 -08:00
committed by GitHub
parent 8e04f98e26
commit d9172efae9
22 changed files with 3197 additions and 80 deletions
@@ -8,14 +8,25 @@ import { lyricsQueries } from '/@/renderer/features/lyrics/api/lyrics-api';
import { Lyrics } from '/@/renderer/features/lyrics/lyrics';
import { PlayQueue } from '/@/renderer/features/now-playing/components/play-queue';
import { PlayQueueListControls } from '/@/renderer/features/now-playing/components/play-queue-list-controls';
import { useGeneralSettings, usePlaybackSettings, usePlayerSong } from '/@/renderer/store';
import {
useGeneralSettings,
usePlaybackSettings,
usePlayerSong,
useSettingsStore,
} from '/@/renderer/store';
import { Divider } from '/@/shared/components/divider/divider';
import { Flex } from '/@/shared/components/flex/flex';
import { Stack } from '/@/shared/components/stack/stack';
import { ItemListKey, PlayerType } from '/@/shared/types/types';
const Visualizer = lazy(() =>
import('/@/renderer/features/player/components/visualizer').then((module) => ({
const AudioMotionAnalyzerVisualizer = lazy(() =>
import('../../visualizer/components/audiomotionanalyzer/visualizer').then((module) => ({
default: module.Visualizer,
})),
);
const ButterchurnVisualizer = lazy(() =>
import('../../visualizer/components/butternchurn/visualizer').then((module) => ({
default: module.Visualizer,
})),
);
@@ -48,6 +59,7 @@ export const SidebarPlayQueue = () => {
const BottomPanel = () => {
const { showLyricsInSidebar, showVisualizerInSidebar } = useGeneralSettings();
const { type, webAudio } = usePlaybackSettings();
const visualizerType = useSettingsStore((store) => store.visualizer.type);
const currentSong = usePlayerSong();
const { data: lyricsData } = useQuery(
@@ -102,7 +114,11 @@ const BottomPanel = () => {
}}
>
<Suspense fallback={<></>}>
<Visualizer />
{visualizerType === 'butterchurn' ? (
<ButterchurnVisualizer />
) : (
<AudioMotionAnalyzerVisualizer />
)}
</Suspense>
</div>
)}
@@ -111,7 +127,11 @@ const BottomPanel = () => {
showVisualizer && (
<div className={styles.visualizerSection}>
<Suspense fallback={<></>}>
<Visualizer />
{visualizerType === 'butterchurn' ? (
<ButterchurnVisualizer />
) : (
<AudioMotionAnalyzerVisualizer />
)}
</Suspense>
</div>
)
@@ -8,7 +8,7 @@ import styles from './full-screen-player-queue.module.css';
import { Lyrics } from '/@/renderer/features/lyrics/lyrics';
import { PlayQueue } from '/@/renderer/features/now-playing/components/play-queue';
import { FullScreenSimilarSongs } from '/@/renderer/features/player/components/full-screen-similar-songs';
import { usePlaybackSettings } from '/@/renderer/store';
import { usePlaybackSettings, useSettingsStore } from '/@/renderer/store';
import {
useFullScreenPlayerStore,
useFullScreenPlayerStoreActions,
@@ -17,8 +17,14 @@ import { Button } from '/@/shared/components/button/button';
import { Group } from '/@/shared/components/group/group';
import { ItemListKey, PlayerType } from '/@/shared/types/types';
const Visualizer = lazy(() =>
import('/@/renderer/features/player/components/visualizer').then((module) => ({
const AudioMotionAnalyzerVisualizer = lazy(() =>
import('../../visualizer/components/audiomotionanalyzer/visualizer').then((module) => ({
default: module.Visualizer,
})),
);
const ButterchurnVisualizer = lazy(() =>
import('../../visualizer/components/butternchurn/visualizer').then((module) => ({
default: module.Visualizer,
})),
);
@@ -28,6 +34,7 @@ export const FullScreenPlayerQueue = () => {
const { activeTab, opacity } = useFullScreenPlayerStore();
const { setStore } = useFullScreenPlayerStoreActions();
const { type, webAudio } = usePlaybackSettings();
const visualizerType = useSettingsStore((store) => store.visualizer.type);
const headerItems = useMemo(() => {
const items = [
@@ -109,7 +116,11 @@ export const FullScreenPlayerQueue = () => {
<Lyrics />
) : activeTab === 'visualizer' && type === PlayerType.WEB && webAudio ? (
<Suspense fallback={<></>}>
<Visualizer />
{visualizerType === 'butterchurn' ? (
<ButterchurnVisualizer />
) : (
<AudioMotionAnalyzerVisualizer />
)}
</Suspense>
) : null}
</div>
@@ -1,11 +0,0 @@
/* Legacy visualizer host (deleted file): fills its parent and centers content. */
.container {
    z-index: 50;
    width: 100%;
    height: 100%;
    margin: auto;

    /* audiomotion-analyzer injects its own <canvas>; stretch it to the host. */
    canvas {
        width: 100%;
        margin: auto;
    }
}
@@ -1,40 +0,0 @@
import AudioMotionAnalyzer from 'audiomotion-analyzer';
import { createRef, useEffect, useState } from 'react';
import styles from './visualizer.module.css';
import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio';
import { useSettingsStore } from '/@/renderer/store';
/**
 * Legacy spectrum visualizer (pre-settings-slice): mounts an
 * AudioMotionAnalyzer with a fixed option set and feeds it every
 * WebAudio gain node exposed by useWebAudio().
 */
export const Visualizer = () => {
    const { webAudio } = useWebAudio();
    // NOTE(review): createRef() yields a fresh ref on every render; useRef is
    // the conventional choice in function components — confirm intentional.
    const canvasRef = createRef<HTMLDivElement>();
    const accent = useSettingsStore((store) => store.general.accent);
    const [motion, setMotion] = useState<AudioMotionAnalyzer>();

    useEffect(() => {
        const { context, gains } = webAudio || {};
        // Create the analyzer once the audio graph and the host <div> exist;
        // the `!motion` guard prevents double initialization.
        if (gains && context && canvasRef.current && !motion) {
            const audioMotion = new AudioMotionAnalyzer(canvasRef.current, {
                ansiBands: true,
                audioCtx: context,
                connectSpeakers: false, // analysis only; audio path is routed elsewhere
                gradient: 'prism',
                ledBars: true,
                mode: 8,
                overlay: true,
                showBgColor: false,
                showPeaks: false,
                showScaleX: false,
                showScaleY: false,
                smoothing: 0.8,
            });
            setMotion(audioMotion);
            // Feed every channel gain into the analyzer input.
            for (const gain of gains) audioMotion.connectInput(gain);
        }
        // NOTE(review): no teardown — the analyzer instance is never destroyed
        // on unmount; confirm this is acceptable for this component's lifetime.
        return () => {};
    }, [accent, canvasRef, motion, webAudio]);

    return <div className={styles.container} ref={canvasRef} />;
};
@@ -0,0 +1,25 @@
import { openContextModal } from '@mantine/modals';
/**
 * Opens the visualizer settings dialog via Mantine's context-modal registry.
 * The form component itself is registered elsewhere under the
 * 'visualizerSettings' key.
 */
export function openVisualizerSettingsModal(): void {
    // Size the dialog to dominate the viewport without exceeding 1400px wide.
    const contentStyles = {
        height: '90%',
        maxWidth: '1400px',
        minHeight: '600px',
        width: '100%',
    };

    openContextModal({
        innerProps: {},
        modalKey: 'visualizerSettings',
        overlayProps: { blur: 0, opacity: 1 },
        size: 'xl',
        styles: { content: contentStyles },
        title: 'Visualizer Settings',
        transitionProps: { transition: 'pop' },
    });
}
@@ -0,0 +1,11 @@
/* Vertical layout for the visualizer settings form. */
.container {
    display: flex;
    flex-direction: column;
    gap: var(--theme-spacing-md);
    width: 100%;
    margin: 0 auto;
}

/* Center labels above the select controls. */
.select-label {
    text-align: center;
}
@@ -0,0 +1,5 @@
import { VisualizerSettingsForm } from './visualizer-settings-form';
/** Modal body registered under the 'visualizerSettings' context-modal key. */
export const VisualizerSettingsContextModal = () => <VisualizerSettingsForm />;
@@ -0,0 +1,28 @@
/* audioMotion visualizer host: fills its parent; settings icon shows on hover. */
.container {
    position: relative;
    z-index: 50;
    width: 100%;
    height: 100%;
    margin: auto;

    /* audiomotion-analyzer injects its own <canvas>; stretch it to the host. */
    canvas {
        width: 100%;
        margin: auto;
    }

    &:hover {
        .settings-icon {
            opacity: 1;
        }
    }
}

/* Hidden until the container is hovered. */
.container .settings-icon {
    opacity: 0;
    transition: opacity 0.2s ease-in-out;
}

.visualizer {
    width: 100%;
    height: 100%;
}
@@ -0,0 +1,243 @@
import AudioMotionAnalyzer from 'audiomotion-analyzer';
import { createRef, useCallback, useEffect, useMemo, useState } from 'react';
import styles from './visualizer.module.css';
import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio';
import { openVisualizerSettingsModal } from '/@/renderer/features/player/utils/open-visualizer-settings-modal';
import { ComponentErrorBoundary } from '/@/renderer/features/shared/components/component-error-boundary';
import { useSettingsStore } from '/@/renderer/store';
import { ActionIcon } from '/@/shared/components/action-icon/action-icon';
/**
 * audioMotion-analyzer spectrum bound to the app's WebAudio graph, driven
 * entirely by the persisted `visualizer` settings slice.
 *
 * Custom gradients must be registered on the analyzer instance before they
 * can be referenced by name, so a `gradientsRegistered` flag gates their use
 * and the built-in 'classic' gradient is substituted until registration runs.
 */
const VisualizerInner = () => {
    const { webAudio } = useWebAudio();
    // NOTE(review): createRef() produces a new ref on every render; useRef is
    // the conventional choice in function components — confirm intentional.
    const canvasRef = createRef<HTMLDivElement>();
    const accent = useSettingsStore((store) => store.general.accent);
    const visualizer = useSettingsStore((store) => store.visualizer);
    const [motion, setMotion] = useState<AudioMotionAnalyzer>();

    // Check if a gradient name is a custom gradient
    const isCustomGradient = useCallback(
        (gradientName: string | undefined): boolean => {
            if (!gradientName || visualizer.type !== 'audiomotionanalyzer') {
                return false;
            }
            const customGradients = visualizer.audiomotionanalyzer.customGradients || [];
            return customGradients.some((gradient) => gradient.name === gradientName);
        },
        [visualizer],
    );

    // True once registerCustomGradients() has run on the current instance.
    const [gradientsRegistered, setGradientsRegistered] = useState(false);

    // Translate the persisted settings into the option bag consumed by the
    // AudioMotionAnalyzer constructor and setOptions().
    const options = useMemo(() => {
        if (visualizer.type !== 'audiomotionanalyzer') {
            return {};
        }

        const ama = visualizer.audiomotionanalyzer;

        const defaults = {
            bgAlpha: 0,
            showBgColor: false,
        };

        const gradients: { gradient?: string; gradientLeft?: string; gradientRight?: string } = {};

        // Use default gradient if custom gradient is selected but not yet registered
        const getSafeGradient = (gradientName: string | undefined): string => {
            if (!gradientName) return 'classic';
            if (isCustomGradient(gradientName)) {
                // Use default until custom gradients are registered
                return gradientsRegistered ? gradientName : 'classic';
            }
            return gradientName;
        };

        // Single-channel layouts use one gradient; dual layouts take one per side.
        if (ama.channelLayout === 'single') {
            gradients.gradient = getSafeGradient(ama.gradient);
        } else {
            gradients.gradientLeft = getSafeGradient(ama.gradientLeft);
            gradients.gradientRight = getSafeGradient(ama.gradientRight);
        }

        return {
            ...defaults,
            ...gradients,
            alphaBars: ama.alphaBars,
            ansiBands: ama.ansiBands,
            barSpace: ama.barSpace,
            channelLayout: ama.channelLayout,
            colorMode: ama.colorMode,
            connectSpeakers: false, // analysis only; audio output is routed elsewhere
            fadePeaks: ama.fadePeaks,
            fftSize: ama.fftSize,
            fillAlpha: ama.fillAlpha,
            frequencyScale: ama.frequencyScale,
            gravity: ama.gravity,
            ledBars: ama.ledBars,
            linearAmplitude: ama.linearAmplitude,
            linearBoost: ama.linearBoost,
            lineWidth: ama.lineWidth,
            loRes: ama.loRes,
            lumiBars: ama.lumiBars,
            maxDecibels: ama.maxDecibels,
            maxFPS: ama.maxFPS,
            maxFreq: ama.maxFreq,
            minDecibels: ama.minDecibels,
            minFreq: ama.minFreq,
            mirror: ama.mirror,
            mode: ama.mode,
            noteLabels: ama.noteLabels,
            outlineBars: ama.outlineBars,
            overlay: true,
            peakFadeTime: ama.peakFadeTime,
            peakHoldTime: ama.peakHoldTime,
            peakLine: ama.peakLine,
            radial: ama.radial,
            radialInvert: ama.radialInvert,
            radius: ama.radius,
            reflexAlpha: ama.reflexAlpha,
            reflexBright: ama.reflexBright,
            reflexFit: ama.reflexFit,
            reflexRatio: ama.reflexRatio,
            roundBars: ama.roundBars,
            showFPS: ama.showFPS,
            showPeaks: ama.showPeaks,
            showScaleX: ama.showScaleX,
            showScaleY: ama.showScaleY,
            smoothing: ama.smoothing,
            spinSpeed: ama.spinSpeed,
            splitGradient: ama.splitGradient,
            trueLeds: ama.trueLeds,
            volume: ama.volume,
            weightingFilter: (ama.weightingFilter || '') as any,
        };
    }, [visualizer, gradientsRegistered, isCustomGradient]);

    // Register every user-defined gradient on the given analyzer instance.
    // Failures are logged per gradient so one bad definition cannot block the rest.
    const registerCustomGradients = useCallback(
        (audioMotionInstance: AudioMotionAnalyzer) => {
            if (visualizer.type !== 'audiomotionanalyzer') {
                return;
            }

            const customGradients = visualizer.audiomotionanalyzer.customGradients || [];
            customGradients.forEach((gradient) => {
                try {
                    const gradientConfig: {
                        colorStops: (string | { color: string; level?: number; pos?: number })[];
                        dir?: string;
                    } = {
                        colorStops: gradient.colorStops,
                    };
                    if (gradient.dir) {
                        gradientConfig.dir = gradient.dir;
                    }
                    // Type assertion needed as TypeScript definitions may be incomplete
                    audioMotionInstance.registerGradient(gradient.name, gradientConfig as any);
                } catch (error) {
                    console.error(`Failed to register gradient "${gradient.name}":`, error);
                }
            });

            // Mark gradients as registered
            setGradientsRegistered(true);
        },
        [visualizer],
    );

    // One-time analyzer construction once the audio graph and host element exist;
    // `!motion` guards against re-creation on subsequent renders.
    useEffect(() => {
        const { context, gains } = webAudio || {};
        if (gains && context && canvasRef.current && !motion) {
            // Reset gradients registered flag on new instance
            setGradientsRegistered(false);

            // Create options without custom gradients on first init
            const initOptions: any = { ...options };

            // Replace custom gradients with default 'classic' for initial setup
            if (visualizer.type === 'audiomotionanalyzer') {
                const ama = visualizer.audiomotionanalyzer;
                if (isCustomGradient(ama.gradient)) {
                    initOptions.gradient = 'classic';
                }
                if (isCustomGradient(ama.gradientLeft)) {
                    initOptions.gradientLeft = 'classic';
                }
                if (isCustomGradient(ama.gradientRight)) {
                    initOptions.gradientRight = 'classic';
                }
            }

            const audioMotion = new AudioMotionAnalyzer(canvasRef.current, {
                ...initOptions,
                audioCtx: context,
            });

            // Register custom gradients (this will set gradientsRegistered to true)
            registerCustomGradients(audioMotion);

            setMotion(audioMotion);
            for (const gain of gains) audioMotion.connectInput(gain);
        }
        // NOTE(review): no teardown — the analyzer instance is never destroyed
        // on unmount; confirm this matches the intended lifetime.
        return () => {};
    }, [
        accent,
        canvasRef,
        motion,
        registerCustomGradients,
        webAudio,
        visualizer,
        options,
        isCustomGradient,
    ]);

    // Re-register custom gradients when they change
    useEffect(() => {
        if (motion && visualizer.type === 'audiomotionanalyzer') {
            setGradientsRegistered(false);
            registerCustomGradients(motion);
        }
    }, [
        motion,
        registerCustomGradients,
        visualizer.audiomotionanalyzer.customGradients,
        visualizer.type,
    ]);

    // Update visualizer settings when they change
    useEffect(() => {
        if (motion) {
            motion.setOptions(options);
        }
    }, [motion, options]);

    return (
        <div className={styles.container}>
            {/* NOTE(review): the stylesheet declares `.settings-icon`; accessing it
                as `styles.settingsIcon` relies on camelCase class-name conversion
                in the CSS modules config — confirm it is enabled. */}
            <ActionIcon
                className={styles.settingsIcon}
                icon="settings2"
                iconProps={{ size: 'lg' }}
                onClick={openVisualizerSettingsModal}
                pos="absolute"
                right={0}
                top={0}
                variant="transparent"
            />
            <div className={styles.visualizer} ref={canvasRef} />
        </div>
    );
};
/** Public entry point; isolates visualizer renderer crashes behind an error boundary. */
export const Visualizer = () => (
    <ComponentErrorBoundary>
        <VisualizerInner />
    </ComponentErrorBoundary>
);
@@ -0,0 +1,7 @@
// Minimal ambient typings for the untyped butterchurn packages.
// The original declarations exported undeclared identifiers
// (`butterchurn` / `butterchurnPresets`), which does not compile under
// strict TypeScript; declare them as `any` constants before exporting.
declare module 'butterchurn' {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const butterchurn: any;
    export default butterchurn;
}

declare module 'butterchurn-presets' {
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const butterchurnPresets: any;
    export default butterchurnPresets;
}
@@ -0,0 +1,43 @@
/* Butterchurn host: fills its parent; the settings icon appears on hover. */
.container {
    position: relative;
    z-index: 50;
    width: 100%;
    height: 100%;
    margin: auto;

    &:hover {
        .settings-icon {
            opacity: 1;
        }
    }
}

/* Hidden until the container is hovered. */
.container .settings-icon {
    opacity: 0;
    transition: opacity 0.2s ease-in-out;
}

/* Explicit canvas element rendered by the component (not injected). */
.canvas {
    display: block;
    width: 100%;
    height: 100%;
}

/* Active preset name, bottom-left; fades in on container hover. */
.preset-overlay {
    position: absolute;
    bottom: 0;
    left: 0;
    z-index: 10;
    padding: var(--theme-spacing-xs) var(--theme-spacing-sm);
    font-weight: 500;
    color: var(--theme-colors-foreground);
    pointer-events: none;
    background-color: rgb(0 0 0 / 50%);
    border-radius: 0 var(--theme-radius-md) 0 0;
    opacity: 0;
    transition: opacity 0.2s ease-in-out;
}

.container:hover .preset-overlay {
    opacity: 1;
}
@@ -0,0 +1,316 @@
import butterchurn from 'butterchurn';
import butterchurnPresets from 'butterchurn-presets';
import { createRef, useEffect, useRef, useState } from 'react';
import styles from './visualizer.module.css';
import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio';
import { openVisualizerSettingsModal } from '/@/renderer/features/player/utils/open-visualizer-settings-modal';
import { ComponentErrorBoundary } from '/@/renderer/features/shared/components/component-error-boundary';
import { useSettingsStore, useSettingsStoreActions } from '/@/renderer/store';
import { usePlayerStatus } from '/@/renderer/store/player.store';
import { ActionIcon } from '/@/shared/components/action-icon/action-icon';
import { Text } from '/@/shared/components/text/text';
import { PlayerStatus } from '/@/shared/types/types';
// Minimal structural typing for the untyped butterchurn visualizer instance;
// only the members this component actually calls are declared.
type ButterchurnVisualizer = {
    connectAudio: (audioNode: AudioNode) => void;
    loadPreset: (preset: any, blendTime: number) => void;
    render: () => void;
    setRendererSize: (width: number, height: number) => void;
};
/**
 * Milkdrop-style visualizer rendered with butterchurn, connected to the first
 * WebAudio gain node. Preset selection, automatic preset cycling and the FPS
 * cap are all driven by the persisted `visualizer.butterchurn` settings slice.
 *
 * Fix vs. original: the resize effect created a new ResizeObserver on every
 * run but its cleanup only removed the window 'resize' listener, never
 * disconnecting the observer — leaking one live observer per run/unmount.
 * Cleanup now disconnects it.
 */
const VisualizerInner = () => {
    const { webAudio } = useWebAudio();
    // NOTE(review): createRef() returns a fresh ref each render; useRef is the
    // conventional choice in function components — confirm intentional.
    const canvasRef = createRef<HTMLCanvasElement>();
    const containerRef = createRef<HTMLDivElement>();
    const [visualizer, setVisualizer] = useState<ButterchurnVisualizer>();
    const animationFrameRef = useRef<number | undefined>(undefined);
    const resizeObserverRef = useRef<ResizeObserver | undefined>(undefined);
    const cycleTimerRef = useRef<NodeJS.Timeout | undefined>(undefined);
    // Epoch (ms) of the last preset change; cycling elapses from here.
    const cycleStartTimeRef = useRef<number | undefined>(undefined);
    const butterchurnSettings = useSettingsStore((store) => store.visualizer.butterchurn);
    const { setSettings } = useSettingsStoreActions();
    const playerStatus = usePlayerStatus();
    const isPlaying = playerStatus === PlayerStatus.PLAYING;

    // Lazily create the butterchurn instance once audio, canvas and container
    // all exist and playback has started; `!visualizer` prevents re-creation.
    useEffect(() => {
        const { context, gains } = webAudio || {};
        if (
            context &&
            gains &&
            canvasRef.current &&
            containerRef.current &&
            !visualizer &&
            isPlaying
        ) {
            const canvas = canvasRef.current;
            const container = containerRef.current;

            const getDimensions = () => {
                const rect = container.getBoundingClientRect();
                // Fall back to sane defaults if layout has not settled yet.
                return {
                    height: rect.height || 600,
                    width: rect.width || 800,
                };
            };

            let dimensions = getDimensions();

            // If dimensions are 0, wait for next frame
            if (dimensions.width === 0 || dimensions.height === 0) {
                requestAnimationFrame(() => {
                    dimensions = getDimensions();
                    if (dimensions.width > 0 && dimensions.height > 0) {
                        initializeVisualizer(dimensions.width, dimensions.height);
                    }
                });
            } else {
                initializeVisualizer(dimensions.width, dimensions.height);
            }

            /** Builds the butterchurn instance, wires audio, and loads the initial preset. */
            function initializeVisualizer(width: number, height: number) {
                if (!gains || gains.length === 0) return;

                canvas.width = width;
                canvas.height = height;

                try {
                    const butterchurnInstance = butterchurn.createVisualizer(context, canvas, {
                        height,
                        width,
                    }) as ButterchurnVisualizer;

                    // Connect to audio gains (use the first gain node)
                    butterchurnInstance.connectAudio(gains[0]);

                    // Load preset from settings or default
                    const presets = butterchurnPresets.getPresets();
                    const presetNames = Object.keys(presets);
                    if (presetNames.length > 0) {
                        const presetName =
                            butterchurnSettings.currentPreset &&
                            presets[butterchurnSettings.currentPreset]
                                ? butterchurnSettings.currentPreset
                                : presetNames[0];
                        const preset = presets[presetName];
                        butterchurnInstance.loadPreset(
                            preset,
                            butterchurnSettings.blendTime || 0.0,
                        );
                        // Initialize cycle timer
                        cycleStartTimeRef.current = Date.now();
                    }

                    setVisualizer(butterchurnInstance);
                } catch (error) {
                    console.error('Failed to create butterchurn visualizer:', error);
                }
            }
        }
        return () => {};
        // eslint-disable-next-line react-hooks/exhaustive-deps
    }, [webAudio, canvasRef, containerRef, visualizer, isPlaying]);

    // Keep the backing canvas and butterchurn renderer sized to the container.
    useEffect(() => {
        const container = containerRef.current;
        if (!container || !visualizer) return;

        const handleResize = () => {
            const rect = container.getBoundingClientRect();
            const width = rect.width;
            const height = rect.height;

            if (canvasRef.current) {
                canvasRef.current.width = width;
                canvasRef.current.height = height;
            }

            visualizer.setRendererSize(width, height);
        };

        resizeObserverRef.current = new ResizeObserver(handleResize);
        resizeObserverRef.current.observe(container);
        window.addEventListener('resize', handleResize);

        return () => {
            window.removeEventListener('resize', handleResize);
            // Fix: disconnect the observer so repeated effect runs (or unmount)
            // do not leak live ResizeObservers watching a stale container.
            resizeObserverRef.current?.disconnect();
            resizeObserverRef.current = undefined;
        };
    }, [visualizer, containerRef, canvasRef]);

    // Update preset when currentPreset or blendTime changes (but not when cycling)
    const isCyclingRef = useRef(false);
    useEffect(() => {
        if (!visualizer || !butterchurnSettings.currentPreset) return;

        // Skip if we're currently cycling (to avoid recreating the visualizer)
        if (isCyclingRef.current) {
            isCyclingRef.current = false;
            return;
        }

        const presets = butterchurnPresets.getPresets();
        const preset = presets[butterchurnSettings.currentPreset];
        if (preset) {
            visualizer.loadPreset(preset, butterchurnSettings.blendTime || 0.0);
            // Reset cycle timer when preset changes manually
            cycleStartTimeRef.current = Date.now();
        }
    }, [visualizer, butterchurnSettings.currentPreset, butterchurnSettings.blendTime]);

    // Handle preset cycling
    useEffect(() => {
        if (!visualizer || !butterchurnSettings.cyclePresets) {
            // Clear cycle timer if cycling is disabled
            if (cycleTimerRef.current) {
                clearInterval(cycleTimerRef.current);
                cycleTimerRef.current = undefined;
            }
            return;
        }

        const presets = butterchurnPresets.getPresets();
        const allPresetNames = Object.keys(presets);

        // Get the list of presets to cycle through
        const presetList = butterchurnSettings.includeAllPresets
            ? allPresetNames
            : butterchurnSettings.selectedPresets.length > 0
              ? butterchurnSettings.selectedPresets.filter((name) => presets[name])
              : allPresetNames;

        if (presetList.length === 0) return;

        // Reset cycle timer when settings change
        cycleStartTimeRef.current = Date.now();

        const cycleToNextPreset = () => {
            if (!visualizer) return;

            const currentPresetName = butterchurnSettings.currentPreset;
            let nextPresetName: string;

            if (butterchurnSettings.randomizeNextPreset) {
                // Randomly select a preset (excluding current if there are multiple)
                const availablePresets =
                    presetList.length > 1
                        ? presetList.filter((name) => name !== currentPresetName)
                        : presetList;
                const randomIndex = Math.floor(Math.random() * availablePresets.length);
                nextPresetName = availablePresets[randomIndex];
            } else {
                // Cycle to next preset in order
                const currentIndex = currentPresetName ? presetList.indexOf(currentPresetName) : -1;
                const nextIndex =
                    currentIndex >= 0 && currentIndex < presetList.length - 1
                        ? currentIndex + 1
                        : 0;
                nextPresetName = presetList[nextIndex];
            }

            const nextPreset = presets[nextPresetName];
            if (nextPreset) {
                // Get current settings to ensure we use the latest blendTime
                const currentSettings = useSettingsStore.getState().visualizer.butterchurn;
                // Mark that we're cycling to prevent the preset change effect from running
                isCyclingRef.current = true;
                // Load the preset with blending
                visualizer.loadPreset(nextPreset, currentSettings.blendTime || 0.0);
                // Update currentPreset in settings
                const currentVisualizer = useSettingsStore.getState().visualizer;
                setSettings({
                    visualizer: {
                        ...currentVisualizer,
                        butterchurn: {
                            ...currentVisualizer.butterchurn,
                            currentPreset: nextPresetName,
                        },
                    },
                });
                cycleStartTimeRef.current = Date.now();
            }
        };

        // Check every second if it's time to cycle
        cycleTimerRef.current = setInterval(() => {
            if (cycleStartTimeRef.current === undefined) {
                cycleStartTimeRef.current = Date.now();
                return;
            }
            const elapsed = (Date.now() - cycleStartTimeRef.current) / 1000; // Convert to seconds
            if (elapsed >= butterchurnSettings.cycleTime) {
                cycleToNextPreset();
            }
        }, 1000);

        return () => {
            if (cycleTimerRef.current) {
                clearInterval(cycleTimerRef.current);
                cycleTimerRef.current = undefined;
            }
        };
    }, [visualizer, butterchurnSettings, setSettings]);

    // Render loop, optionally throttled to butterchurnSettings.maxFPS (0 = uncapped).
    useEffect(() => {
        if (!visualizer) return;

        let lastFrameTime = 0;
        const maxFPS = butterchurnSettings.maxFPS;
        const minFrameInterval = maxFPS > 0 ? 1000 / maxFPS : 0;

        const render = (currentTime: number) => {
            if (maxFPS === 0 || currentTime - lastFrameTime >= minFrameInterval) {
                visualizer.render();
                lastFrameTime = currentTime;
            }
            animationFrameRef.current = requestAnimationFrame(render);
        };

        animationFrameRef.current = requestAnimationFrame(render);

        return () => {
            if (animationFrameRef.current) {
                cancelAnimationFrame(animationFrameRef.current);
            }
        };
    }, [visualizer, butterchurnSettings.maxFPS]);

    return (
        <div className={styles.container} ref={containerRef}>
            {/* NOTE(review): the stylesheet declares `.settings-icon`; accessing it
                as `styles.settingsIcon` relies on camelCase class-name conversion
                in the CSS modules config — confirm it is enabled. */}
            <ActionIcon
                className={styles.settingsIcon}
                icon="settings2"
                iconProps={{ size: 'lg' }}
                onClick={openVisualizerSettingsModal}
                pos="absolute"
                right={0}
                top={0}
                variant="transparent"
            />
            <canvas className={styles.canvas} ref={canvasRef} />
            {butterchurnSettings.currentPreset && (
                <Text className={styles['preset-overlay']} isNoSelect size="sm">
                    {butterchurnSettings.currentPreset}
                </Text>
            )}
        </div>
    );
};
/** Public entry point; isolates butterchurn renderer crashes behind an error boundary. */
export const Visualizer = () => (
    <ComponentErrorBoundary>
        <VisualizerInner />
    </ComponentErrorBoundary>
);
+2
View File
@@ -8,6 +8,7 @@ import { UpdatePlaylistContextModal } from '/@/renderer/features/playlists/compo
import { SettingsContextModal } from '/@/renderer/features/settings/components/settings-modal';
import { RouterErrorBoundary } from '/@/renderer/features/shared/components/router-error-boundary';
import { ShareItemContextModal } from '/@/renderer/features/sharing/components/share-item-context-modal';
import { VisualizerSettingsContextModal } from '/@/renderer/features/visualizer/components/audiomotionanalyzer/visualizer-settings-modal';
import { AuthenticationOutlet } from '/@/renderer/layouts/authentication-outlet';
import { ResponsiveLayout } from '/@/renderer/layouts/responsive-layout';
import { AppOutlet } from '/@/renderer/router/app-outlet';
@@ -97,6 +98,7 @@ export const AppRouter = () => {
shareItem: ShareItemContextModal,
shuffleAll: ShuffleAllContextModal,
updatePlaylist: UpdatePlaylistContextModal,
visualizerSettings: VisualizerSettingsContextModal,
}}
>
<RouterErrorBoundary>
+182
View File
@@ -216,6 +216,120 @@ const PlayerbarSliderSchema = z.object({
type: PlayerbarSliderTypeSchema,
});
// Schema for every option forwarded to audioMotion-analyzer via the
// visualizer component; the long .describe() texts double as help copy.
const AudioMotionAnalyzerSettingsSchema = z.object({
    alphaBars: z
        .boolean()
        .describe(
            'When set to true each bars amplitude affects its opacity, i.e., higher bars are rendered more opaque while shorter bars are more transparent. This is similar to the lumiBars effect, but bars amplitudes are preserved and it also works on Discrete mode and radial spectrum.',
        ),
    ansiBands: z
        .boolean()
        .describe(
            'When set to true, ANSI/IEC preferred frequencies are used to generate the bands for octave bands modes (see mode). The preferred base-10 scale is used to compute the center and bandedge frequencies, as specified in the ANSI S1.11-2004 standard. When false, bands are based on the equal-tempered scale, so that in 1/12 octave bands the center of each band is perfectly tuned to a musical note.',
        ),
    barSpace: z
        .number()
        .describe(
            'Customize the spacing between bars in frequency bands modes (see mode). Use a value between 0 and 1 for spacing proportional to the band width. Values >= 1 will be considered as a literal number of pixels.',
        ),
    channelLayout: z
        .enum(['single', 'dual-combined', 'dual-horizontal', 'dual-vertical'])
        .describe('Defines the number and layout of analyzer channels.'),
    colorMode: z
        .enum(['gradient', 'bar-index', 'bar-level'])
        .describe('Selects the desired mode for coloring the analyzer bars.'),
    // User-defined gradients; registered on the analyzer instance at runtime.
    customGradients: z.array(
        z.object({
            colorStops: z.array(
                z.string().or(
                    z.object({
                        color: z.string(),
                        level: z.number().min(0).max(1).optional(),
                        pos: z.number().min(0).max(1).optional(),
                    }),
                ),
            ),
            dir: z.string().optional(),
            name: z.string(),
        }),
    ),
    fadePeaks: z
        .boolean()
        .describe(
            'When true, peaks fade out instead of falling down. It has no effect when peakLine is active.',
        ),
    fftSize: z
        .number()
        .describe(
            'Number of samples used for the FFT performed by the AnalyzerNode. It must be a power of 2 between 32 and 32768, so valid values are: 32, 64, 128, 256, 512, 1024, 2048, 4096, 8192, 16384, and 32768. Higher values provide more detail in the frequency domain, but less detail in the time domain (slower response), so you may need to adjust smoothing accordingly.',
        ),
    fillAlpha: z.number(),
    frequencyScale: z.enum(['bark', 'linear', 'log', 'mel']),
    gradient: z.string(),
    gradientLeft: z.string().optional(),
    gradientRight: z.string().optional(),
    gravity: z.number(),
    ledBars: z.boolean(),
    linearAmplitude: z.boolean(),
    linearBoost: z.number(),
    lineWidth: z.number(),
    loRes: z.boolean(),
    lumiBars: z.boolean(),
    maxDecibels: z.number(),
    maxFPS: z.number(),
    maxFreq: z.number(),
    minDecibels: z.number(),
    minFreq: z.number(),
    mirror: z.number(),
    mode: z.number(),
    noteLabels: z.boolean(),
    outlineBars: z.boolean(),
    peakFadeTime: z.number(),
    peakHoldTime: z.number(),
    peakLine: z.boolean(),
    // Saved user presets: opaque option bags keyed by display name.
    presets: z.array(
        z.object({
            name: z.string(),
            value: z.any(),
        }),
    ),
    radial: z.boolean(),
    radialInvert: z.boolean(),
    radius: z.number(),
    reflexAlpha: z.number(),
    reflexBright: z.number(),
    reflexFit: z.boolean(),
    reflexRatio: z.number(),
    roundBars: z.boolean(),
    showFPS: z.boolean(),
    showPeaks: z.boolean(),
    showScaleX: z.boolean(),
    showScaleY: z.boolean(),
    smoothing: z.number(),
    spinSpeed: z.number(),
    splitGradient: z.boolean(),
    trueLeds: z.boolean(),
    volume: z.number(),
    weightingFilter: z.enum(['', 'A', 'B', 'C', 'D', 'Z']),
});

// Butterchurn (Milkdrop-style) preset and cycling configuration.
const ButterchurnSettingsSchema = z.object({
    blendTime: z.number().min(0).max(10),
    currentPreset: z.string().optional(),
    cyclePresets: z.boolean(),
    cycleTime: z.number().min(1).max(300),
    includeAllPresets: z.boolean(),
    maxFPS: z.number().min(0),
    randomizeNextPreset: z.boolean(),
    selectedPresets: z.array(z.string()),
});

// Top-level visualizer settings: per-engine config plus the active engine type.
const VisualizerSettingsSchema = z.object({
    audiomotionanalyzer: AudioMotionAnalyzerSettingsSchema,
    butterchurn: ButterchurnSettingsSchema,
    type: z.enum(['audiomotionanalyzer', 'butterchurn']),
});
export const GeneralSettingsSchema = z.object({
accent: z
.string()
@@ -440,6 +554,7 @@ export const ValidationSettingsStateSchema = z.object({
z.literal('window'),
z.string(),
]),
visualizer: VisualizerSettingsSchema,
window: WindowSettingsSchema,
});
@@ -1294,6 +1409,70 @@ const initialState: SettingsState = {
username: 'feishin',
},
tab: 'general',
visualizer: {
audiomotionanalyzer: {
alphaBars: false,
ansiBands: false,
barSpace: 0.1,
channelLayout: 'single',
colorMode: 'gradient',
customGradients: [],
fadePeaks: false,
fftSize: 8192,
fillAlpha: 1,
frequencyScale: 'log',
gradient: 'prism',
gravity: 3.8,
ledBars: true,
linearAmplitude: true,
linearBoost: 4,
lineWidth: 0,
loRes: false,
lumiBars: false,
maxDecibels: -25,
maxFPS: 0,
maxFreq: 22000,
minDecibels: -85,
minFreq: 20,
mirror: 0,
mode: 8,
noteLabels: false,
outlineBars: false,
peakFadeTime: 750,
peakHoldTime: 500,
peakLine: false,
presets: [],
radial: false,
radialInvert: false,
radius: 0.6,
reflexAlpha: 0.5,
reflexBright: 1,
reflexFit: false,
reflexRatio: 0,
roundBars: false,
showFPS: false,
showPeaks: false,
showScaleX: false,
showScaleY: false,
smoothing: 0.5,
spinSpeed: 0.5,
splitGradient: false,
trueLeds: false,
volume: 1,
weightingFilter: '',
},
butterchurn: {
blendTime: 2.5,
currentPreset: undefined,
cyclePresets: true,
cycleTime: 30,
includeAllPresets: true,
maxFPS: 0,
randomizeNextPreset: true,
selectedPresets: [],
},
type: 'audiomotionanalyzer',
},
window: {
disableAutoUpdate: false,
exitToTray: false,
@@ -1364,6 +1543,7 @@ export const useSettingsStore = createWithEqualityFn<SettingsSlice>()(
state.queryBuilder = resetState.queryBuilder;
state.remote = resetState.remote;
state.tab = resetState.tab;
state.visualizer = resetState.visualizer;
state.window = resetState.window;
});
},
@@ -1633,3 +1813,5 @@ export const usePlayerbarSlider = () => useSettingsStore((store) => store.genera
// Selector hooks over the settings store; the shallow comparator on the
// object-valued slices limits re-renders to actual content changes.
export const useGenreTarget = () => useSettingsStore((store) => store.general.genreTarget);
export const useAutoDJSettings = () => useSettingsStore((store) => store.autoDJ, shallow);
export const useVisualizerSettings = () => useSettingsStore((store) => store.visualizer, shallow);