reorganize

This commit is contained in:
jeffvli
2025-11-02 19:04:29 -08:00
parent 79ddd122a4
commit e02a518583
8 changed files with 11 additions and 9 deletions
@@ -1,171 +0,0 @@
import type { RefObject } from 'react';
import { useImperativeHandle, useRef, useState } from 'react';
import ReactPlayer from 'react-player';
import { AudioPlayer } from '/@/renderer/components/audio-player/types';
import { PlayerStatus } from '/@/shared/types/types';
// Progress payload emitted by react-player's onProgress callback.
export interface OnProgressProps {
    // Fraction (0-1) of the media that has been buffered.
    loaded: number;
    loadedSeconds: number;
    // Fraction (0-1) of the media that has been played.
    played: number;
    playedSeconds: number;
}
// Imperative API exposed by WebPlayerEngine through its playerRef, extending
// the shared AudioPlayer controls with per-deck access.
export interface WebPlayerEngineHandle extends AudioPlayer {
    // Accessor for deck 1: the underlying ReactPlayer instance (null until
    // mounted) and a volume setter taking the 0-100 scale.
    player1(): {
        ref: null | ReactPlayer;
        setVolume: (volume: number) => void;
    };
    // Accessor for deck 2; same contract as player1().
    player2(): {
        ref: null | ReactPlayer;
        setVolume: (volume: number) => void;
    };
}
interface WebPlayerEngineProps {
    // Mutes both <audio> elements when true.
    isMuted: boolean;
    // True while a gapless/crossfade hand-off is in progress; tightens the
    // progress-callback interval from 250ms to 10ms.
    isTransitioning: boolean;
    onEndedPlayer1: () => void;
    onEndedPlayer2: () => void;
    onProgressPlayer1: (e: OnProgressProps) => void;
    onProgressPlayer2: (e: OnProgressProps) => void;
    // Which deck (1 or 2) is currently the active player.
    playerNum: number;
    playerRef: RefObject<WebPlayerEngineHandle>;
    playerStatus: PlayerStatus;
    // Playback-rate multiplier; treated as 1 when omitted.
    speed?: number;
    src1: string | undefined;
    src2: string | undefined;
    // Linear volume on the 0-100 scale (converted to 0-1 internally).
    volume: number;
}
// Credits: https://gist.github.com/novwhisky/8a1a0168b94f3b6abfaa?permalink_comment_id=1551393#gistcomment-1551393
// This is used so that the player will always have an <audio> element. This means that
// player1Source and player2Source are connected BEFORE the user presses play for
// the first time. This workaround is important for Safari, which seems to require the
// source to be connected PRIOR to resuming audio context
const EMPTY_SOURCE =
'data:audio/mp3;base64,SUQzBAAAAAAAI1RTU0UAAAAPAAADTGF2ZjU2LjM2LjEwMAAAAAAAAAAAAAAA//OEAAAAAAAAAAAAAAAAAAAAAAAASW5mbwAAAA8AAAAEAAABIADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV6urq6urq6urq6urq6urq6urq6urq6urq6v////////////////////////////////8AAAAATGF2YzU2LjQxAAAAAAAAAAAAAAAAJAAAAAAAAAAAASDs90hvAAAAAAAAAAAAAAAAAAAA//MUZAAAAAGkAAAAAAAAA0gAAAAATEFN//MUZAMAAAGkAAAAAAAAA0gAAAAARTMu//MUZAYAAAGkAAAAAAAAA0gAAAAAOTku//MUZAkAAAGkAAAAAAAAA0gAAAAANVVV';
// Dual-deck ReactPlayer engine. Keeps two players and exposes an imperative
// handle (WebPlayerEngineHandle) for play/pause/seek and per-deck volume.
export const WebPlayerEngine = (props: WebPlayerEngineProps) => {
    const {
        isMuted,
        isTransitioning,
        onEndedPlayer1,
        onEndedPlayer2,
        onProgressPlayer1,
        onProgressPlayer2,
        playerNum,
        playerRef,
        playerStatus,
        speed,
        src1,
        src2,
        volume,
    } = props;
    const player1Ref = useRef<null | ReactPlayer>(null);
    const player2Ref = useRef<null | ReactPlayer>(null);
    // Volumes are held internally on the 0-1 scale; the external API uses 0-100.
    const [internalVolume1, setInternalVolume1] = useState(volume / 100 || 0);
    const [internalVolume2, setInternalVolume2] = useState(volume / 100 || 0);
    useImperativeHandle<WebPlayerEngineHandle, WebPlayerEngineHandle>(playerRef, () => ({
        // decrease/increase adjust BOTH decks, clamped into [0, 1].
        decreaseVolume(by: number) {
            setInternalVolume1(Math.max(0, internalVolume1 - by / 100));
            setInternalVolume2(Math.max(0, internalVolume2 - by / 100));
        },
        increaseVolume(by: number) {
            setInternalVolume1(Math.min(1, internalVolume1 + by / 100));
            setInternalVolume2(Math.min(1, internalVolume2 + by / 100));
        },
        // pause() stops both decks; play() only starts the active one.
        pause() {
            player1Ref.current?.getInternalPlayer()?.pause();
            player2Ref.current?.getInternalPlayer()?.pause();
        },
        play() {
            if (playerNum === 1) {
                player1Ref.current?.getInternalPlayer()?.play();
            } else {
                player2Ref.current?.getInternalPlayer()?.play();
            }
        },
        player1() {
            return {
                ref: player1Ref?.current,
                setVolume: (volume: number) => setInternalVolume1(volume / 100 || 0),
            };
        },
        player2() {
            return {
                ref: player2Ref?.current,
                setVolume: (volume: number) => setInternalVolume2(volume / 100 || 0),
            };
        },
        // Seeks only the active deck.
        seekTo(seekTo: number) {
            playerNum === 1
                ? player1Ref.current?.seekTo(seekTo)
                : player2Ref.current?.seekTo(seekTo);
        },
        setVolume(volume: number) {
            setInternalVolume1(volume / 100 || 0);
            setInternalVolume2(volume / 100 || 0);
        },
        setVolume1(volume: number) {
            setInternalVolume1(volume / 100 || 0);
        },
        setVolume2(volume: number) {
            setInternalVolume2(volume / 100 || 0);
        },
    }));
    return (
        <>
            {/* NOTE(review): because each player is only rendered when its src is
                set, the `src || EMPTY_SOURCE` fallback below never takes the
                EMPTY_SOURCE branch — the always-mounted-<audio> Safari workaround
                may not apply here; confirm whether the Boolean(src) guards are
                intended. */}
            {Boolean(src1) && (
                <ReactPlayer
                    config={{
                        file: { attributes: { crossOrigin: 'anonymous' }, forceAudio: true },
                    }}
                    controls={false}
                    height={0}
                    muted={isMuted}
                    onEnded={src1 ? () => onEndedPlayer1() : undefined}
                    onProgress={onProgressPlayer1}
                    playbackRate={speed || 1}
                    playing={playerNum === 1 && playerStatus === PlayerStatus.PLAYING}
                    progressInterval={isTransitioning ? 10 : 250}
                    ref={player1Ref}
                    url={src1 || EMPTY_SOURCE}
                    volume={convertToLogVolume(internalVolume1)}
                    width={0}
                />
            )}
            {Boolean(src2) && (
                <ReactPlayer
                    config={{
                        file: { attributes: { crossOrigin: 'anonymous' }, forceAudio: true },
                    }}
                    controls={false}
                    height={0}
                    muted={isMuted}
                    onEnded={src2 ? () => onEndedPlayer2() : undefined}
                    onProgress={onProgressPlayer2}
                    playbackRate={speed || 1}
                    playing={playerNum === 2 && playerStatus === PlayerStatus.PLAYING}
                    progressInterval={isTransitioning ? 10 : 250}
                    ref={player2Ref}
                    url={src2 || EMPTY_SOURCE}
                    volume={convertToLogVolume(internalVolume2)}
                    width={0}
                />
            )}
        </>
    );
};
WebPlayerEngine.displayName = 'WebPlayerEngine';
/**
 * Maps a linear 0-1 volume value onto a perceptual curve by squaring it, so
 * low slider positions produce proportionally quieter output.
 */
function convertToLogVolume(linearVolume: number): number {
    return linearVolume * linearVolume;
}
@@ -1,489 +0,0 @@
import type { Song } from '/@/shared/types/domain-types';
import type { CrossfadeStyle } from '/@/shared/types/types';
import type { ReactPlayerProps } from 'react-player';
import isElectron from 'is-electron';
import {
forwardRef,
useCallback,
useEffect,
useImperativeHandle,
useMemo,
useRef,
useState,
} from 'react';
import ReactPlayer from 'react-player/lazy';
import { api } from '/@/renderer/api';
import {
crossfadeHandler,
gaplessHandler,
} from '/@/renderer/components/audio-player/utils/list-handlers';
import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio';
import {
TranscodingConfig,
usePlaybackSettings,
usePlayerSpeed,
useSettingsStore,
useSettingsStoreActions,
} from '/@/renderer/store';
import { toast } from '/@/shared/components/toast/toast';
import { PlayerStatus, PlayerStyle } from '/@/shared/types/types';
// Shape of react-player progress events (loaded/played are 0-1 fractions;
// the *Seconds fields are absolute positions).
export type AudioPlayerProgress = {
    loaded: number;
    loadedSeconds: number;
    played: number;
    playedSeconds: number;
};
interface AudioPlayerProps extends ReactPlayerProps {
    // Advances the queue when the active track finishes.
    autoNext: () => void;
    // Crossfade length in seconds.
    crossfadeDuration: number;
    crossfadeStyle: CrossfadeStyle;
    // Which deck is audible; the other holds the next track.
    currentPlayer: 1 | 2;
    muted: boolean;
    // Gapless vs crossfade transition behavior.
    playbackStyle: PlayerStyle;
    player1?: Song;
    player2?: Song;
    status: PlayerStatus;
    // Volume passed straight through to the <audio> elements (or, when web
    // audio is active, used to drive the GainNode instead).
    volume: number;
}
// Reaches through react-player's nested wrapper objects to the underlying
// media element's duration (undefined until the player is ready).
const getDuration = (ref: any) => ref.current?.player?.player?.player?.duration;
// Credits: https://gist.github.com/novwhisky/8a1a0168b94f3b6abfaa?permalink_comment_id=1551393#gistcomment-1551393
// This is used so that the player will always have an <audio> element. This means that
// player1Source and player2Source are connected BEFORE the user presses play for
// the first time. This workaround is important for Safari, which seems to require the
// source to be connected PRIOR to resuming audio context
const EMPTY_SOURCE =
'data:audio/mp3;base64,SUQzBAAAAAAAI1RTU0UAAAAPAAADTGF2ZjU2LjM2LjEwMAAAAAAAAAAAAAAA//OEAAAAAAAAAAAAAAAAAAAAAAAASW5mbwAAAA8AAAAEAAABIADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV6urq6urq6urq6urq6urq6urq6urq6urq6v////////////////////////////////8AAAAATGF2YzU2LjQxAAAAAAAAAAAAAAAAJAAAAAAAAAAAASDs90hvAAAAAAAAAAAAAAAAAAAA//MUZAAAAAGkAAAAAAAAA0gAAAAATEFN//MUZAMAAAGkAAAAAAAAA0gAAAAARTMu//MUZAYAAAGkAAAAAAAAA0gAAAAAOTku//MUZAkAAAGkAAAAAAAAA0gAAAAANVVV';
// Resolves the playable URL for a song, applying the transcoding config when
// enabled. Caches the last [uniqueId, url] pair in a ref so that, for the
// CURRENTLY playing track, a transcoding reconfiguration keeps the old URL and
// does not force a playback restart.
const useSongUrl = (transcode: TranscodingConfig, current: boolean, song?: Song): null | string => {
    // prior holds [uniqueId, resolvedUrl] of the last computed result.
    const prior = useRef(['', '']);
    return useMemo(() => {
        if (song?.serverId) {
            // If we are the current track, we do not want a transcoding
            // reconfiguration to force a restart.
            if (current && prior.current[0] === song.uniqueId) {
                return prior.current[1];
            }
            if (!transcode.enabled) {
                // transcoding disabled; save the result
                prior.current = [song.uniqueId, song.streamUrl];
                return song.streamUrl;
            }
            const result = api.controller.getTranscodingUrl({
                apiClientProps: {
                    serverId: song.serverId,
                },
                query: {
                    base: song.streamUrl,
                    ...transcode,
                },
            })!;
            // transcoding enabled; save the updated result
            prior.current = [song.uniqueId, result];
            return result;
        }
        // no track; clear result
        prior.current = ['', ''];
        return null;
    }, [current, song?.uniqueId, song?.serverId, song?.streamUrl, transcode]);
};
// Imperative handle exposing the raw ReactPlayer instance of each deck.
export interface AudioPlayerRef {
    player1: null | ReactPlayer;
    player2: null | ReactPlayer;
}
// Dual-deck audio-element player. Two ReactPlayer instances stay mounted so
// gapless/crossfade transitions can pre-start the next track; output can be
// routed through a shared Web Audio GainNode (for ReplayGain and sink
// selection) when the webAudio setting is enabled.
export const AudioPlayer = forwardRef<AudioPlayerRef, AudioPlayerProps>((props, ref) => {
    const {
        autoNext,
        crossfadeDuration,
        crossfadeStyle,
        currentPlayer,
        muted,
        playbackStyle,
        player1,
        player2,
        status,
        volume,
    } = props;
    const player1Ref = useRef<ReactPlayer>(null);
    const player2Ref = useRef<ReactPlayer>(null);
    const [isTransitioning, setIsTransitioning] = useState(false);
    const audioDeviceId = useSettingsStore((state) => state.playback.audioDeviceId);
    const playback = useSettingsStore((state) => state.playback.mpvProperties);
    const shouldUseWebAudio = useSettingsStore((state) => state.playback.webAudio);
    const preservesPitch = useSettingsStore((state) => state.playback.preservePitch);
    const { resetSampleRate } = useSettingsStoreActions();
    const playbackSpeed = usePlayerSpeed();
    const { transcode } = usePlaybackSettings();
    const stream1 = useSongUrl(transcode, currentPlayer === 1, player1);
    const stream2 = useSongUrl(transcode, currentPlayer === 2, player2);
    const { setWebAudio, webAudio } = useWebAudio();
    // MediaElementAudioSourceNodes created lazily on first ready (one per deck).
    const [player1Source, setPlayer1Source] = useState<MediaElementAudioSourceNode | null>(null);
    const [player2Source, setPlayer2Source] = useState<MediaElementAudioSourceNode | null>(null);
    // Computes the linear gain multiplier for a song from its ReplayGain tags,
    // honoring track/album mode, pre-amp, fallback gain, and the clipping guard.
    const calculateReplayGain = useCallback(
        (song: Song): number => {
            if (playback.replayGainMode === 'no') {
                return 1;
            }
            let gain: number | undefined;
            let peak: number | undefined;
            if (playback.replayGainMode === 'track') {
                gain = song.gain?.track ?? song.gain?.album;
                peak = song.peak?.track ?? song.peak?.album;
            } else {
                gain = song.gain?.album ?? song.gain?.track;
                peak = song.peak?.album ?? song.peak?.track;
            }
            if (gain === undefined) {
                gain = playback.replayGainFallbackDB;
                if (!gain) {
                    return 1;
                }
            }
            if (peak === undefined) {
                peak = 1;
            }
            const preAmp = playback.replayGainPreampDB ?? 0;
            // https://wiki.hydrogenaud.io/index.php?title=ReplayGain_1.0_specification&section=19
            // Normalized to max gain
            const expectedGain = 10 ** ((gain + preAmp) / 20);
            if (playback.replayGainClip) {
                return Math.min(expectedGain, 1 / peak);
            }
            return expectedGain;
        },
        [
            playback.replayGainClip,
            playback.replayGainFallbackDB,
            playback.replayGainMode,
            playback.replayGainPreampDB,
        ],
    );
    // Create the shared AudioContext + GainNode once on mount (web audio only);
    // the context is closed on unmount.
    useEffect(() => {
        if (shouldUseWebAudio && 'AudioContext' in window) {
            let context: AudioContext;
            try {
                context = new AudioContext({
                    latencyHint: 'playback',
                    sampleRate: playback.audioSampleRateHz || undefined,
                });
            } catch (error) {
                // In practice, this should never be hit because the UI should validate
                // the range. However, the actual supported range is not guaranteed
                toast.error({ message: (error as Error).message });
                context = new AudioContext({ latencyHint: 'playback' });
                resetSampleRate();
            }
            const gain = context.createGain();
            gain.connect(context.destination);
            setWebAudio!({ context, gain });
            return () => {
                return context.close();
            };
        }
        return () => {};
        // Intentionally ignore the sample rate dependency, as it makes things really messy
        // eslint-disable-next-line react-hooks/exhaustive-deps
    }, []);
    useImperativeHandle(ref, () => ({
        get player1() {
            return player1Ref?.current;
        },
        get player2() {
            return player2Ref?.current;
        },
    }));
    const handleOnEnded = () => {
        autoNext();
        setIsTransitioning(false);
    };
    // Skips to the next track on unrecoverable decode/unsupported-source errors;
    // any other media error is only logged.
    const handleOnError = (playerRef: React.RefObject<ReactPlayer>) => {
        return ({ target }: ErrorEvent) => {
            const { current: player } = playerRef;
            if (!player || !(target instanceof Audio)) {
                return;
            }
            const { error } = target;
            console.log('Playback error occurred:', error);
            if (
                error?.code !== MediaError.MEDIA_ERR_DECODE &&
                error?.code !== MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED
            ) {
                return;
            }
            handleOnEnded();
        };
    };
    // Drive the underlying media elements from the store status; also re-applies
    // preservesPitch before each play() since it lives on the media element.
    useEffect(() => {
        if (status === PlayerStatus.PLAYING) {
            if (currentPlayer === 1) {
                // calling play() is not necessarily a safe option (https://developer.chrome.com/blog/play-request-was-interrupted)
                // In practice, this failure is only likely to happen when using the 0-second wav:
                // play() + play() in rapid succession will cause problems as the first one ends the track.
                const internalPlayer = player1Ref.current?.getInternalPlayer();
                if (internalPlayer) {
                    internalPlayer.preservesPitch = preservesPitch;
                    internalPlayer.play().catch(() => {});
                }
            } else {
                const internalPlayer = player2Ref.current?.getInternalPlayer();
                if (internalPlayer) {
                    internalPlayer.preservesPitch = preservesPitch;
                    internalPlayer.play().catch(() => {});
                }
            }
        } else {
            player1Ref.current?.getInternalPlayer()?.pause();
            player2Ref.current?.getInternalPlayer()?.pause();
        }
    }, [currentPlayer, status, preservesPitch]);
    const handleCrossfade1 = useCallback(
        (e: AudioPlayerProgress) => {
            return crossfadeHandler({
                currentPlayer,
                currentPlayerRef: player1Ref,
                currentTime: e.playedSeconds,
                duration: getDuration(player1Ref),
                fadeDuration: crossfadeDuration,
                fadeType: crossfadeStyle,
                isTransitioning,
                nextPlayerRef: player2Ref,
                player: 1,
                setIsTransitioning,
                volume,
            });
        },
        [crossfadeDuration, crossfadeStyle, currentPlayer, isTransitioning, volume],
    );
    const handleCrossfade2 = useCallback(
        (e: AudioPlayerProgress) => {
            return crossfadeHandler({
                currentPlayer,
                currentPlayerRef: player2Ref,
                currentTime: e.playedSeconds,
                duration: getDuration(player2Ref),
                fadeDuration: crossfadeDuration,
                fadeType: crossfadeStyle,
                isTransitioning,
                nextPlayerRef: player1Ref,
                player: 2,
                setIsTransitioning,
                volume,
            });
        },
        [crossfadeDuration, crossfadeStyle, currentPlayer, isTransitioning, volume],
    );
    const handleGapless1 = useCallback(
        (e: AudioPlayerProgress) => {
            return gaplessHandler({
                currentTime: e.playedSeconds,
                duration: getDuration(player1Ref),
                isFlac: player1?.container === 'flac',
                isTransitioning,
                nextPlayerRef: player2Ref,
                setIsTransitioning,
            });
        },
        [isTransitioning, player1?.container],
    );
    const handleGapless2 = useCallback(
        (e: AudioPlayerProgress) => {
            return gaplessHandler({
                currentTime: e.playedSeconds,
                duration: getDuration(player2Ref),
                isFlac: player2?.container === 'flac',
                isTransitioning,
                nextPlayerRef: player1Ref,
                setIsTransitioning,
            });
        },
        [isTransitioning, player2?.container],
    );
    useEffect(() => {
        // Not standard, just used in chromium-based browsers. See
        // https://developer.chrome.com/blog/audiocontext-setsinkid/.
        // If the isElectron() check is ever removed, fix this.
        if (isElectron() && webAudio && 'setSinkId' in webAudio.context && audioDeviceId) {
            const setSink = async () => {
                try {
                    if (webAudio.context.state !== 'closed') {
                        await (webAudio.context as any).setSinkId(audioDeviceId);
                    }
                } catch (error) {
                    toast.error({ message: `Error setting sink: ${(error as Error).message}` });
                }
            };
            setSink();
        }
    }, [audioDeviceId, webAudio]);
    // Apply ReplayGain through the shared GainNode: set the current track's gain
    // now, and schedule the next track's gain just before this one ends.
    useEffect(() => {
        if (!webAudio) return;
        const sources = [player1Source ? player1 : null, player2Source ? player2 : null];
        const current = sources[currentPlayer - 1];
        // Set the current replaygain
        if (current) {
            const newVolume = calculateReplayGain(current) * volume;
            webAudio.gain.gain.setValueAtTime(Math.max(0, newVolume), 0);
        }
        // Set the next track replaygain right before the end of this track
        // Attempt to prevent pop-in for web audio.
        const next = sources[3 - currentPlayer];
        if (next && current) {
            const newVolume = calculateReplayGain(next) * volume;
            webAudio.gain.gain.setValueAtTime(
                Math.max(0, newVolume),
                Math.max(0, (current.duration - 1) / 1000),
            );
        }
    }, [
        calculateReplayGain,
        currentPlayer,
        player1,
        player1Source,
        player2,
        player2Source,
        volume,
        webAudio,
    ]);
    // On first ready, connect deck 1's media element into the web-audio graph;
    // on later readies, just resume the context if it was suspended.
    const handlePlayer1Start = useCallback(
        async (player: ReactPlayer) => {
            if (!webAudio) return;
            if (player1Source) {
                // This should fire once, only if the source is real (meaning we
                // saw the dummy source) and the context is not ready
                if (webAudio.context.state !== 'running') {
                    await webAudio.context.resume();
                }
                return;
            }
            const internal = player.getInternalPlayer() as HTMLMediaElement | undefined;
            if (internal) {
                const { context, gain } = webAudio;
                const source = context.createMediaElementSource(internal);
                source.connect(gain);
                setPlayer1Source(source);
            }
        },
        [player1Source, webAudio],
    );
    // Same as handlePlayer1Start, for deck 2.
    const handlePlayer2Start = useCallback(
        async (player: ReactPlayer) => {
            if (!webAudio) return;
            if (player2Source) {
                if (webAudio.context.state !== 'running') {
                    await webAudio.context.resume();
                }
                return;
            }
            const internal = player.getInternalPlayer() as HTMLMediaElement | undefined;
            if (internal) {
                const { context, gain } = webAudio;
                const source = context.createMediaElementSource(internal);
                source.connect(gain);
                setPlayer2Source(source);
            }
        },
        [player2Source, webAudio],
    );
    // Bugfix for Safari: rather than use the `<audio>` volume (which doesn't work),
    // use the GainNode to scale the volume. In this case, for compatibility with
    // other browsers, set the `<audio>` volume to 1
    return (
        <>
            <ReactPlayer
                config={{
                    file: { attributes: { crossOrigin: 'anonymous' }, forceAudio: true },
                }}
                height={0}
                muted={muted}
                // If there is no stream url, we do not need to handle when the audio finishes
                onEnded={stream1 ? handleOnEnded : undefined}
                onError={handleOnError(player1Ref)}
                onProgress={
                    playbackStyle === PlayerStyle.GAPLESS ? handleGapless1 : handleCrossfade1
                }
                onReady={handlePlayer1Start}
                playbackRate={playbackSpeed}
                playing={currentPlayer === 1 && status === PlayerStatus.PLAYING}
                progressInterval={isTransitioning ? 10 : 250}
                ref={player1Ref}
                url={stream1 || EMPTY_SOURCE}
                volume={webAudio ? 1 : volume}
                width={0}
            />
            <ReactPlayer
                config={{
                    file: { attributes: { crossOrigin: 'anonymous' }, forceAudio: true },
                }}
                height={0}
                muted={muted}
                onEnded={stream2 ? handleOnEnded : undefined}
                onError={handleOnError(player2Ref)}
                onProgress={
                    playbackStyle === PlayerStyle.GAPLESS ? handleGapless2 : handleCrossfade2
                }
                onReady={handlePlayer2Start}
                playbackRate={playbackSpeed}
                playing={currentPlayer === 2 && status === PlayerStatus.PLAYING}
                progressInterval={isTransitioning ? 10 : 250}
                ref={player2Ref}
                url={stream2 || EMPTY_SOURCE}
                volume={webAudio ? 1 : volume}
                width={0}
            />
        </>
    );
});
@@ -1,96 +0,0 @@
import {
QueueData,
subscribeCurrentTrack,
subscribePlayerMute,
subscribePlayerProgress,
subscribePlayerQueue,
subscribePlayerSeekToTimestamp,
subscribePlayerSpeed,
subscribePlayerStatus,
subscribePlayerVolume,
} from '/@/renderer/store';
import { QueueSong } from '/@/shared/types/domain-types';
import { PlayerStatus } from '/@/shared/types/types';
export interface PlayerEvents {
cleanup: () => void;
}
// Optional listeners for player store events. Each callback receives the new
// value first and the previous value second. Note that the scalar payloads are
// wrapped in objects (e.g. { volume }, { timestamp }) — consumers must
// destructure them.
export interface PlayerEventsCallbacks {
    onCurrentSongChange?: (
        properties: { index: number; song: QueueSong | undefined },
        prev: { index: number; song: QueueSong | undefined },
    ) => void;
    onPlayerMute?: (properties: { muted: boolean }, prev: { muted: boolean }) => void;
    onPlayerProgress?: (properties: { timestamp: number }, prev: { timestamp: number }) => void;
    onPlayerQueueChange?: (queue: QueueData, prev: QueueData) => void;
    onPlayerSeek?: (properties: { seconds: number }, prev: { seconds: number }) => void;
    onPlayerSeekToTimestamp?: (
        properties: { timestamp: number },
        prev: { timestamp: number },
    ) => void;
    onPlayerSpeed?: (properties: { speed: number }, prev: { speed: number }) => void;
    onPlayerStatus?: (
        properties: { song: QueueSong | undefined; status: PlayerStatus },
        prev: { song: QueueSong | undefined; status: PlayerStatus },
    ) => void;
    onPlayerVolume?: (properties: { volume: number }, prev: { volume: number }) => void;
}
/**
 * Subscribes the provided callbacks to the player store and returns a handle
 * whose cleanup() tears every subscription down again. Only the callbacks that
 * are actually supplied get subscribed.
 */
export function createPlayerEvents(callbacks: PlayerEventsCallbacks): PlayerEvents {
    const unsubscribers: (() => void)[] = [];
    const register = (unsubscribe: () => void) => {
        unsubscribers.push(unsubscribe);
    };
    if (callbacks.onCurrentSongChange) {
        register(subscribeCurrentTrack(callbacks.onCurrentSongChange));
    }
    if (callbacks.onPlayerProgress) {
        register(subscribePlayerProgress(callbacks.onPlayerProgress));
    }
    if (callbacks.onPlayerQueueChange) {
        register(subscribePlayerQueue(callbacks.onPlayerQueueChange));
    }
    if (callbacks.onPlayerSeekToTimestamp) {
        register(subscribePlayerSeekToTimestamp(callbacks.onPlayerSeekToTimestamp));
    }
    if (callbacks.onPlayerStatus) {
        register(subscribePlayerStatus(callbacks.onPlayerStatus));
    }
    if (callbacks.onPlayerVolume) {
        register(subscribePlayerVolume(callbacks.onPlayerVolume));
    }
    if (callbacks.onPlayerMute) {
        register(subscribePlayerMute(callbacks.onPlayerMute));
    }
    if (callbacks.onPlayerSpeed) {
        register(subscribePlayerSpeed(callbacks.onPlayerSpeed));
    }
    return {
        cleanup: () => {
            for (const unsubscribe of unsubscribers) {
                unsubscribe();
            }
        },
    };
}
@@ -1,17 +0,0 @@
import { useEffect } from 'react';
import {
createPlayerEvents,
PlayerEventsCallbacks,
} from '/@/renderer/components/audio-player/listener/player-events';
/**
 * React hook wrapper around createPlayerEvents: subscriptions are (re)created
 * whenever `deps` change and torn down on cleanup/unmount.
 */
export function usePlayerEvents(callbacks: PlayerEventsCallbacks, deps: React.DependencyList) {
    useEffect(() => {
        const events = createPlayerEvents(callbacks);
        return events.cleanup;
        // eslint-disable-next-line react-hooks/exhaustive-deps
    }, deps);
}
@@ -1,8 +0,0 @@
// Minimal imperative control surface shared by audio player engines.
export interface AudioPlayer {
    // `by` is on the 0-100 scale, matching setVolume.
    decreaseVolume(by: number): void;
    increaseVolume(by: number): void;
    pause(): void;
    play(): void;
    // Seek target in seconds.
    seekTo(seekTo: number): void;
    // Linear volume on the 0-100 scale.
    setVolume(volume: number): void;
}
@@ -1,140 +0,0 @@
import type { Dispatch } from 'react';
import { CrossfadeStyle } from '/@/shared/types/types';
/**
 * Gapless-transition tick, called from the active player's onProgress.
 * Arms the transition two seconds before the end of the track, then starts the
 * next player slightly early to cover the decoder's trailing gap.
 */
export const gaplessHandler = (args: {
    currentTime: number;
    duration: number;
    isFlac: boolean;
    isTransitioning: boolean;
    nextPlayerRef: any;
    setIsTransitioning: Dispatch<boolean>;
}) => {
    const { currentTime, duration, isFlac, isTransitioning, nextPlayerRef, setIsTransitioning } =
        args;
    // Arm the hand-off once playback enters the final two seconds.
    if (!isTransitioning) {
        return currentTime > duration - 2 ? setIsTransitioning(true) : null;
    }
    // Per-format pad (seconds) before the end at which the next player starts;
    // values appear empirically chosen — FLAC uses the smaller pad.
    const padding = isFlac ? 0.065 : 0.116;
    if (currentTime + padding < duration) {
        return null;
    }
    return nextPlayerRef.current
        .getInternalPlayer()
        ?.play()
        .catch(() => {});
};
/**
 * Crossfade tick, called from a player's onProgress.
 *
 * Phase 1 (not transitioning, or called for the inactive deck): once playback
 * enters the final `fadeDuration` seconds, the next player is started and the
 * transition armed. Phase 2 (transitioning, active deck): both media-element
 * volumes are recomputed from the time remaining using the selected curve.
 *
 * Cleanup: the previous implementation duplicated the 'linear' case in
 * `default` and had byte-identical `if`/`else` branches when applying the
 * volumes; both were consolidated with no behavior change.
 */
export const crossfadeHandler = (args: {
    currentPlayer: 1 | 2;
    currentPlayerRef: any;
    currentTime: number;
    duration: number;
    fadeDuration: number;
    fadeType: CrossfadeStyle;
    isTransitioning: boolean;
    nextPlayerRef: any;
    player: 1 | 2;
    setIsTransitioning: Dispatch<boolean>;
    volume: number;
}) => {
    const {
        currentPlayer,
        currentPlayerRef,
        currentTime,
        duration,
        fadeDuration,
        fadeType,
        isTransitioning,
        nextPlayerRef,
        player,
        setIsTransitioning,
        volume,
    } = args;
    if (!isTransitioning || currentPlayer !== player) {
        // check for a large-enough duration, as the default audio element has some dummy audio
        const shouldBeginTransition = duration > 0.5 && currentTime >= duration - fadeDuration;
        if (shouldBeginTransition) {
            setIsTransitioning(true);
            return nextPlayerRef.current
                .getInternalPlayer()
                ?.play()
                .catch(() => {});
        }
        return null;
    }
    const timeLeft = duration - currentTime;
    let currentPlayerVolumeCalculation;
    let nextPlayerVolumeCalculation;
    let percentageOfFadeLeft;
    let n;
    switch (fadeType) {
        case 'dipped':
            // https://math.stackexchange.com/a/4622
            percentageOfFadeLeft = timeLeft / fadeDuration;
            currentPlayerVolumeCalculation = percentageOfFadeLeft ** 2 * volume;
            nextPlayerVolumeCalculation = (percentageOfFadeLeft - 1) ** 2 * volume;
            break;
        case 'equalPower':
            // https://dsp.stackexchange.com/a/14755
            percentageOfFadeLeft = (timeLeft / fadeDuration) * 2;
            currentPlayerVolumeCalculation = Math.sqrt(0.5 * percentageOfFadeLeft) * volume;
            nextPlayerVolumeCalculation = Math.sqrt(0.5 * (2 - percentageOfFadeLeft)) * volume;
            break;
        // Matches every constantPower* style: when fadeType starts with
        // "constantPower", `match(...)?.input` evaluates to fadeType itself, so
        // this case compares equal; otherwise it is undefined and never matches.
        case fadeType.match(/constantPower.*/)?.input:
            // https://math.stackexchange.com/a/26159
            n =
                fadeType === 'constantPower'
                    ? 0
                    : fadeType === 'constantPowerSlowFade'
                      ? 1
                      : fadeType === 'constantPowerSlowCut'
                        ? 3
                        : 10;
            percentageOfFadeLeft = timeLeft / fadeDuration;
            currentPlayerVolumeCalculation =
                Math.cos((Math.PI / 4) * ((2 * percentageOfFadeLeft - 1) ** (2 * n + 1) - 1)) *
                volume;
            nextPlayerVolumeCalculation =
                Math.cos((Math.PI / 4) * ((2 * percentageOfFadeLeft - 1) ** (2 * n + 1) + 1)) *
                volume;
            break;
        // 'linear' and any unrecognized style share the simple linear ramp.
        default:
            currentPlayerVolumeCalculation = (timeLeft / fadeDuration) * volume;
            nextPlayerVolumeCalculation = ((fadeDuration - timeLeft) / fadeDuration) * volume;
            break;
    }
    // Clamp into [0, volume] before applying to the media elements.
    const currentPlayerVolume =
        currentPlayerVolumeCalculation >= 0 ? currentPlayerVolumeCalculation : 0;
    const nextPlayerVolume =
        nextPlayerVolumeCalculation <= volume ? nextPlayerVolumeCalculation : volume;
    currentPlayerRef.current.getInternalPlayer().volume = currentPlayerVolume;
    nextPlayerRef.current.getInternalPlayer().volume = nextPlayerVolume;
    return null;
};
@@ -1,322 +0,0 @@
import type { Dispatch } from 'react';
import type ReactPlayer from 'react-player';
import { useCallback, useRef, useState } from 'react';
import { OnProgressProps } from 'react-player/base';
import { WebPlayerEngine, WebPlayerEngineHandle } from './engine/web-player-engine';
import { usePlayerEvents } from '/@/renderer/components/audio-player/listener/use-player-events';
import {
usePlayerActions,
usePlayerData,
usePlayerMuted,
usePlayerProperties,
usePlayerVolume,
} from '/@/renderer/store';
import { PlayerStatus, PlayerStyle } from '/@/shared/types/types';
const PLAY_PAUSE_FADE_DURATION = 300;
const PLAY_PAUSE_FADE_INTERVAL = 10;
/**
 * Web-based dual-deck player. Wires player store events into WebPlayerEngine
 * and implements gapless / crossfade transitions plus a short volume fade on
 * play/pause.
 *
 * Fixes:
 * - The usePlayerEvents callbacks receive object payloads per
 *   PlayerEventsCallbacks ({ timestamp }, { song, status }, { volume }); the
 *   previous code treated the whole object as the scalar, so seeks passed an
 *   object to seekTo, the status comparison never matched (play/pause fade
 *   never ran), and setVolume(NaN/100 || 0) silenced playback.
 * - onProgressPlayer1 now uses the PlayerStyle enum members, matching
 *   onProgressPlayer2, instead of raw 'crossfade'/'gapless' string literals.
 */
export function AudiolingWebPlayer() {
    const playerRef = useRef<WebPlayerEngineHandle>(null);
    const { player, player1, player2 } = usePlayerData();
    const { mediaAutoNext, setProgress } = usePlayerActions();
    const { crossfadeDuration, speed, transitionType } = usePlayerProperties();
    const isMuted = usePlayerMuted();
    const volume = usePlayerVolume();
    const [localPlayerStatus, setLocalPlayerStatus] = useState<PlayerStatus>(player.status);
    // false when idle; truthy (deck name for crossfade, true for gapless)
    // while a transition is in progress.
    const [isTransitioning, setIsTransitioning] = useState<boolean | string>(false);
    // Ramps both decks from startVolume to endVolume over `duration` ms, then
    // applies the new status (pause after the fade-out, play before the fade-in).
    const fadeAndSetStatus = useCallback(
        async (startVolume: number, endVolume: number, duration: number, status: PlayerStatus) => {
            if (isTransitioning) {
                return setLocalPlayerStatus(status);
            }
            const steps = duration / PLAY_PAUSE_FADE_INTERVAL;
            const volumeStep = (endVolume - startVolume) / steps;
            let currentStep = 0;
            const promise = new Promise((resolve) => {
                const interval = setInterval(() => {
                    currentStep++;
                    const newVolume = startVolume + volumeStep * currentStep;
                    playerRef.current?.setVolume(newVolume);
                    if (currentStep >= steps) {
                        clearInterval(interval);
                        setIsTransitioning(false);
                        resolve(true);
                    }
                }, PLAY_PAUSE_FADE_INTERVAL);
            });
            if (status === PlayerStatus.PAUSED) {
                await promise;
                setLocalPlayerStatus(status);
            } else if (status === PlayerStatus.PLAYING) {
                setLocalPlayerStatus(status);
                await promise;
            }
        },
        [isTransitioning],
    );
    const onProgressPlayer1 = useCallback(
        (e: OnProgressProps) => {
            if (transitionType === PlayerStyle.CROSSFADE && player.playerNum === 1) {
                setProgress(Number(e.playedSeconds.toFixed(0)));
            } else if (transitionType === PlayerStyle.GAPLESS) {
                setProgress(Number(e.playedSeconds.toFixed(0)));
            }
            if (!playerRef.current?.player1()) {
                return;
            }
            switch (transitionType) {
                case PlayerStyle.CROSSFADE:
                    crossfadeHandler({
                        crossfadeDuration: crossfadeDuration,
                        currentPlayer: playerRef.current.player1(),
                        currentPlayerNum: player.playerNum,
                        currentTime: e.playedSeconds,
                        duration: getDuration(playerRef.current.player1().ref),
                        isTransitioning,
                        nextPlayer: playerRef.current.player2(),
                        playerNum: 1,
                        setIsTransitioning,
                        volume,
                    });
                    break;
                case PlayerStyle.GAPLESS:
                    gaplessHandler({
                        currentTime: e.playedSeconds,
                        duration: getDuration(playerRef.current.player1().ref),
                        isFlac: false,
                        isTransitioning,
                        nextPlayer: playerRef.current.player2(),
                        setIsTransitioning,
                    });
                    break;
            }
        },
        [crossfadeDuration, isTransitioning, player.playerNum, setProgress, transitionType, volume],
    );
    const onProgressPlayer2 = useCallback(
        (e: OnProgressProps) => {
            if (transitionType === PlayerStyle.CROSSFADE && player.playerNum === 2) {
                setProgress(Number(e.playedSeconds.toFixed(0)));
            } else if (transitionType === PlayerStyle.GAPLESS) {
                setProgress(Number(e.playedSeconds.toFixed(0)));
            }
            if (!playerRef.current?.player2()) {
                return;
            }
            switch (transitionType) {
                case PlayerStyle.CROSSFADE:
                    crossfadeHandler({
                        crossfadeDuration: crossfadeDuration,
                        currentPlayer: playerRef.current.player2(),
                        currentPlayerNum: player.playerNum,
                        currentTime: e.playedSeconds,
                        duration: getDuration(playerRef.current.player2().ref),
                        isTransitioning,
                        nextPlayer: playerRef.current.player1(),
                        playerNum: 2,
                        setIsTransitioning,
                        volume,
                    });
                    break;
                case PlayerStyle.GAPLESS:
                    gaplessHandler({
                        currentTime: e.playedSeconds,
                        duration: getDuration(playerRef.current.player2().ref),
                        isFlac: false,
                        isTransitioning,
                        nextPlayer: playerRef.current.player1(),
                        setIsTransitioning,
                    });
                    break;
            }
        },
        [crossfadeDuration, isTransitioning, player.playerNum, setProgress, transitionType, volume],
    );
    // After advancing the queue, pause the finished deck and restore the volume
    // that the crossfade ramped down.
    const handleOnEndedPlayer1 = useCallback(() => {
        const promise = new Promise((resolve) => {
            mediaAutoNext();
            resolve(true);
        });
        promise.then(() => {
            playerRef.current?.player1()?.ref?.getInternalPlayer().pause();
            playerRef.current?.setVolume(volume);
            setIsTransitioning(false);
        });
    }, [mediaAutoNext, volume]);
    const handleOnEndedPlayer2 = useCallback(() => {
        const promise = new Promise((resolve) => {
            mediaAutoNext();
            resolve(true);
        });
        promise.then(() => {
            playerRef.current?.player2()?.ref?.getInternalPlayer().pause();
            playerRef.current?.setVolume(volume);
            setIsTransitioning(false);
        });
    }, [mediaAutoNext, volume]);
    usePlayerEvents(
        {
            onPlayerSeekToTimestamp: ({ timestamp }) => {
                if (player.playerNum === 1) {
                    playerRef.current?.player1()?.ref?.seekTo(timestamp);
                } else {
                    playerRef.current?.player2()?.ref?.seekTo(timestamp);
                }
            },
            onPlayerStatus: async ({ status }) => {
                if (status === PlayerStatus.PAUSED) {
                    fadeAndSetStatus(volume, 0, PLAY_PAUSE_FADE_DURATION, PlayerStatus.PAUSED);
                } else if (status === PlayerStatus.PLAYING) {
                    fadeAndSetStatus(0, volume, PLAY_PAUSE_FADE_DURATION, PlayerStatus.PLAYING);
                }
            },
            onPlayerVolume: ({ volume: newVolume }) => {
                playerRef.current?.setVolume(newVolume);
            },
        },
        [volume, player.playerNum, isTransitioning],
    );
    return (
        <WebPlayerEngine
            isMuted={isMuted}
            isTransitioning={Boolean(isTransitioning)}
            onEndedPlayer1={handleOnEndedPlayer1}
            onEndedPlayer2={handleOnEndedPlayer2}
            onProgressPlayer1={onProgressPlayer1}
            onProgressPlayer2={onProgressPlayer2}
            playerNum={player.playerNum}
            playerRef={playerRef}
            playerStatus={localPlayerStatus}
            speed={speed}
            src1={player1?.streamUrl}
            src2={player2?.streamUrl}
            volume={volume}
        />
    );
}
// Linear crossfade for the web engine. Once the current deck enters its final
// `crossfadeDuration` seconds, the next deck is started at volume 0 and the two
// deck volumes are ramped linearly in opposite directions on each progress tick.
function crossfadeHandler(args: {
    crossfadeDuration: number;
    currentPlayer: {
        ref: null | ReactPlayer;
        setVolume: (volume: number) => void;
    };
    currentPlayerNum: number;
    currentTime: number;
    duration: number;
    // false when idle, or 'player1'/'player2' naming the deck that owns the fade.
    isTransitioning: boolean | string;
    nextPlayer: {
        ref: null | ReactPlayer;
        setVolume: (volume: number) => void;
    };
    playerNum: number;
    setIsTransitioning: Dispatch<boolean | string>;
    volume: number;
}) {
    const {
        crossfadeDuration,
        currentPlayer,
        currentPlayerNum,
        currentTime,
        duration,
        isTransitioning,
        nextPlayer,
        playerNum,
        setIsTransitioning,
        volume,
    } = args;
    const player = `player${playerNum}`;
    if (!isTransitioning) {
        // Entering the fade window: silence and start the next deck, and record
        // which deck owns the transition.
        if (currentTime > duration - crossfadeDuration) {
            nextPlayer.setVolume(0);
            nextPlayer.ref?.getInternalPlayer().play();
            return setIsTransitioning(player);
        }
        return;
    }
    // NOTE(review): this guard only bails when BOTH the transition owner and the
    // active deck differ from this handler's deck — confirm whether `||` was
    // intended so a non-owning deck never adjusts volumes.
    if (isTransitioning !== player && currentPlayerNum !== playerNum) {
        return;
    }
    const timeLeft = duration - currentTime;
    // Calculate the volume levels based on time remaining
    const currentPlayerVolume = (timeLeft / crossfadeDuration) * volume;
    const nextPlayerVolume = ((crossfadeDuration - timeLeft) / crossfadeDuration) * volume;
    // Set volumes for both players
    currentPlayer.setVolume(currentPlayerVolume);
    nextPlayer.setVolume(nextPlayerVolume);
}
/**
 * Gapless-transition tick for the web engine. Arms the transition two seconds
 * before the end of the track, then starts the next deck slightly before the
 * current one finishes to cover the decoder's trailing gap.
 */
function gaplessHandler(args: {
    currentTime: number;
    duration: number;
    isFlac: boolean;
    isTransitioning: boolean | string;
    nextPlayer: {
        ref: null | ReactPlayer;
        setVolume: (volume: number) => void;
    };
    setIsTransitioning: Dispatch<boolean | string>;
}) {
    const { currentTime, duration, isFlac, isTransitioning, nextPlayer, setIsTransitioning } = args;
    // Arm the hand-off once playback enters the final two seconds.
    if (!isTransitioning) {
        return currentTime > duration - 2 ? setIsTransitioning(true) : null;
    }
    // Per-format pad in seconds (same values as getDurationPadding).
    const padding = isFlac ? 0.065 : 0.116;
    if (currentTime + padding < duration) {
        return null;
    }
    return nextPlayer.ref
        ?.getInternalPlayer()
        ?.play()
        .catch(() => {});
}
// Duration (seconds) of the deck's underlying media element, or 0 while the
// player is not mounted or the duration is not yet known.
function getDuration(ref: null | ReactPlayer | undefined) {
    const media = ref?.getInternalPlayer();
    return media?.duration || 0;
}
/**
 * Trailing-gap padding (in seconds) used to start the next track slightly
 * before the current one ends during gapless playback. Values appear
 * empirically chosen; FLAC uses the smaller pad — TODO confirm.
 *
 * Replaces a switch over a boolean with the idiomatic ternary (same values).
 */
function getDurationPadding(isFlac: boolean): number {
    return isFlac ? 0.065 : 0.116;
}