Remove stream url, cleanup old audio player (#1269)

* Remove stream url, cleanup old audio player

* remove unused api in playerbar waveform

* make jellyfin transcoding work?
Kendall Garner
2025-11-23 17:12:33 -08:00
committed by jeffvli
parent db110733a4
commit 80419a1edf
13 changed files with 160 additions and 621 deletions
@@ -514,6 +514,20 @@ export const controller: GeneralController = {
query: mergeMusicFolderId(args.query, server),
});
},
getStreamUrl(args) {
const server = getServerById(args.apiClientProps.serverId);
if (!server) {
throw new Error(
`${i18n.t('error.apiRouteError', { postProcess: 'sentenceCase' })}: getStreamUrl`,
);
}
return apiController(
'getStreamUrl',
server.type,
)?.({ ...args, apiClientProps: { ...args.apiClientProps, server } });
},
getStructuredLyrics(args) {
const server = getServerById(args.apiClientProps.serverId);
@@ -556,20 +570,6 @@ export const controller: GeneralController = {
server.type,
)?.({ ...args, apiClientProps: { ...args.apiClientProps, server } });
},
getTranscodingUrl(args) {
const server = getServerById(args.apiClientProps.serverId);
if (!server) {
throw new Error(
`${i18n.t('error.apiRouteError', { postProcess: 'sentenceCase' })}: getTranscodingUrl`,
);
}
return apiController(
'getTranscodingUrl',
server.type,
)?.({ ...args, apiClientProps: { ...args.apiClientProps, server } });
},
getUserList(args) {
const server = getServerById(args.apiClientProps.serverId);
@@ -552,7 +552,7 @@ export const JellyfinController: InternalControllerEndpoint = {
}
return {
- items: res.body.Items.map((item) => jfNormalize.song(item, apiClientProps.server, '')),
+ items: res.body.Items.map((item) => jfNormalize.song(item, apiClientProps.server)),
startIndex: 0,
totalRecordCount: res.body.TotalRecordCount,
};
@@ -602,7 +602,7 @@ export const JellyfinController: InternalControllerEndpoint = {
}
return {
- items: res.body.Items.map((item) => jfNormalize.song(item, apiClientProps.server, '')),
+ items: res.body.Items.map((item) => jfNormalize.song(item, apiClientProps.server)),
startIndex: 0,
totalRecordCount: res.body.Items.length || 0,
};
@@ -647,7 +647,7 @@ export const JellyfinController: InternalControllerEndpoint = {
if (res.status === 200 && res.body.Items.length) {
const results = res.body.Items.reduce<Song[]>((acc, song) => {
if (song.Id !== query.songId) {
- acc.push(jfNormalize.song(song, apiClientProps.server, ''));
+ acc.push(jfNormalize.song(song, apiClientProps.server));
}
return acc;
@@ -676,7 +676,7 @@ export const JellyfinController: InternalControllerEndpoint = {
return mix.body.Items.reduce<Song[]>((acc, song) => {
if (song.Id !== query.songId) {
- acc.push(jfNormalize.song(song, apiClientProps.server, ''));
+ acc.push(jfNormalize.song(song, apiClientProps.server));
}
return acc;
@@ -696,7 +696,7 @@ export const JellyfinController: InternalControllerEndpoint = {
throw new Error('Failed to get song detail');
}
- return jfNormalize.song(res.body, apiClientProps.server, '');
+ return jfNormalize.song(res.body, apiClientProps.server);
},
getSongList: async (args) => {
const { apiClientProps, query } = args;
@@ -809,7 +809,7 @@ export const JellyfinController: InternalControllerEndpoint = {
return {
items: items.map((item) =>
- jfNormalize.song(item, apiClientProps.server, '', query.imageSize),
+ jfNormalize.song(item, apiClientProps.server, query.imageSize),
),
startIndex: query.startIndex,
totalRecordCount,
@@ -820,6 +820,39 @@ export const JellyfinController: InternalControllerEndpoint = {
apiClientProps,
query: { ...query, limit: 1, startIndex: 0 },
}).then((result) => result!.totalRecordCount!),
getStreamUrl: ({ apiClientProps: { server }, query }) => {
const { bitrate, format, id, transcode } = query;
const deviceId = '';
let url =
`${server?.url}/audio` +
`/${id}/universal` +
`?userId=${server?.userId}` +
`&deviceId=${deviceId}` +
'&audioCodec=aac' +
`&apiKey=${server?.credential}` +
`&playSessionId=${deviceId}` +
'&container=opus,mp3,aac,m4a,m4b,flac,wav,ogg';
if (transcode) {
// A format appears to be required; fall back to trusty MP3 if none is specified.
// Otherwise, ffmpeg appears to crash.
const realFormat = format || 'mp3';
url += `&transcodingProtocol=http&transcodingContainer=${realFormat}`;
url = url.replace('audioCodec=aac', `audioCodec=${realFormat}`);
url = url.replace(
'&container=opus,mp3,aac,m4a,m4b,flac,wav,ogg',
`&container=${realFormat}`,
);
if (bitrate !== undefined) {
url += `&maxStreamingBitrate=${bitrate * 1000}`;
}
}
return url;
},
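A rough sketch of the URLs this builder yields (the host, ids, and credential below are placeholders, not values from this commit):

// Direct play (transcode = false), approximately:
//   https://jellyfin.example/audio/<id>/universal?userId=<uid>&deviceId=
//     &audioCodec=aac&apiKey=<key>&playSessionId=
//     &container=opus,mp3,aac,m4a,m4b,flac,wav,ogg
// Transcoding (transcode = true, format = 'opus', bitrate = 320), approximately:
//   ...&audioCodec=opus&apiKey=<key>&playSessionId=&container=opus
//     &transcodingProtocol=http&transcodingContainer=opus&maxStreamingBitrate=320000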
getTags: async (args) => {
const { apiClientProps, query } = args;
@@ -873,24 +906,11 @@ export const JellyfinController: InternalControllerEndpoint = {
}
return {
- items: res.body.Items.map((item) => jfNormalize.song(item, apiClientProps.server, '')),
+ items: res.body.Items.map((item) => jfNormalize.song(item, apiClientProps.server)),
startIndex: 0,
totalRecordCount: res.body.TotalRecordCount,
};
},
getTranscodingUrl: (args) => {
const { base, bitrate, format } = args.query;
let url = base.replace('transcodingProtocol=hls', 'transcodingProtocol=http');
if (format) {
url = url.replace('audioCodec=aac', `audioCodec=${format}`);
url = url.replace('transcodingContainer=ts', `transcodingContainer=${format}`);
}
if (bitrate !== undefined) {
url += `&maxStreamingBitrate=${bitrate * 1000}`;
}
return url;
},
movePlaylistItem: async (args) => {
const { apiClientProps, query } = args;
@@ -1082,7 +1102,7 @@ export const JellyfinController: InternalControllerEndpoint = {
jfNormalize.albumArtist(item, apiClientProps.server),
),
albums: albums.map((item) => jfNormalize.album(item, apiClientProps.server)),
- songs: songs.map((item) => jfNormalize.song(item, apiClientProps.server, '')),
+ songs: songs.map((item) => jfNormalize.song(item, apiClientProps.server)),
};
},
updatePlaylist: async (args) => {
@@ -605,6 +605,7 @@ export const NavidromeController: InternalControllerEndpoint = {
apiClientProps,
query: { ...query, limit: 1, startIndex: 0 },
}).then((result) => result!.totalRecordCount!),
getStreamUrl: SubsonicController.getStreamUrl,
getStructuredLyrics: SubsonicController.getStructuredLyrics,
getTags: async (args) => {
const { apiClientProps } = args;
@@ -646,7 +647,6 @@ export const NavidromeController: InternalControllerEndpoint = {
};
},
getTopSongs: SubsonicController.getTopSongs,
getTranscodingUrl: SubsonicController.getTranscodingUrl,
getUserList: async (args) => {
const { apiClientProps, query } = args;
@@ -1246,6 +1246,21 @@ export const SubsonicController: InternalControllerEndpoint = {
return totalRecordCount;
},
getStreamUrl: ({ apiClientProps: { server }, query }) => {
const { bitrate, format, id, transcode } = query;
let url = `${server?.url}/rest/stream.view?id=${id}&v=1.13.0&c=Feishin&${server?.credential}`;
if (transcode) {
if (format) {
url += `&format=${format}`;
}
if (bitrate !== undefined) {
url += `&maxBitRate=${bitrate}`;
}
}
return url;
},
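For comparison, a sketch of the Subsonic URL (placeholders again); note that maxBitRate is already in kbps here, so there is no ×1000 scaling as in the Jellyfin endpoint:

// transcode = true, format = 'mp3', bitrate = 320, approximately:
//   https://subsonic.example/rest/stream.view?id=<id>&v=1.13.0&c=Feishin
//     &<server.credential>&format=mp3&maxBitRate=320
// transcode = false: neither parameter is appended and the server's defaults apply.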
getStructuredLyrics: async (args) => {
const { apiClientProps, query } = args;
@@ -1311,18 +1326,6 @@ export const SubsonicController: InternalControllerEndpoint = {
totalRecordCount: res.body.topSongs?.song?.length || 0,
};
},
getTranscodingUrl: (args) => {
const { base, bitrate, format } = args.query;
let url = base;
if (format) {
url += `&format=${format}`;
}
if (bitrate !== undefined) {
url += `&maxBitRate=${bitrate}`;
}
return url;
},
removeFromPlaylist: async ({ apiClientProps, query }) => {
const res = await ssApiClient(apiClientProps).updatePlaylist({
query: {
@@ -0,0 +1,49 @@
import { useMemo, useRef } from 'react';
import { api } from '/@/renderer/api';
import { TranscodingConfig } from '/@/renderer/store';
import { QueueSong } from '/@/shared/types/domain-types';
export function useSongUrl(
song: QueueSong | undefined,
current: boolean,
transcode: TranscodingConfig,
): string | undefined {
const prior = useRef(['', '']);
return useMemo(() => {
if (song?._serverId) {
// If we are the current track, we do not want a transcoding
// reconfiguration to force a restart.
if (current && prior.current[0] === song._uniqueId) {
return prior.current[1];
}
const url = api.controller.getStreamUrl({
apiClientProps: { serverId: song._serverId },
query: {
bitrate: transcode.bitrate,
format: transcode.format,
id: song.id,
transcode: transcode.enabled,
},
});
// save the computed URL for the current track
prior.current = [song._uniqueId, url];
return url;
}
// no track; clear result
prior.current = ['', ''];
return undefined;
}, [
song?._serverId,
song?._uniqueId,
song?.id,
current,
transcode.bitrate,
transcode.format,
transcode.enabled,
]);
}
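Usage mirrors the call sites added later in this commit; a minimal sketch (names as in the MpvPlayer diff below):

const { transcode } = usePlaybackSettings();
// current === true memoizes the URL so that a transcoding change mid-song
// does not force the active track to restart
const currentUrl = useSongUrl(currentSong, true, transcode);
const nextUrl = useSongUrl(nextSong, false, transcode);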
@@ -1,493 +0,0 @@
import type { QueueSong, Song } from '/@/shared/types/domain-types';
import type { CrossfadeStyle } from '/@/shared/types/types';
import type { ReactPlayerProps } from 'react-player';
import isElectron from 'is-electron';
import {
forwardRef,
useCallback,
useEffect,
useImperativeHandle,
useMemo,
useRef,
useState,
} from 'react';
import ReactPlayer from 'react-player/lazy';
import { api } from '/@/renderer/api';
import {
crossfadeHandler,
gaplessHandler,
} from '/@/renderer/features/player/audio-player/utils/list-handlers';
import { useWebAudio } from '/@/renderer/features/player/hooks/use-webaudio';
import {
TranscodingConfig,
usePlaybackSettings,
usePlayerSpeed,
useSettingsStore,
useSettingsStoreActions,
} from '/@/renderer/store';
import { toast } from '/@/shared/components/toast/toast';
import { PlayerStatus, PlayerStyle } from '/@/shared/types/types';
export type AudioPlayerProgress = {
loaded: number;
loadedSeconds: number;
played: number;
playedSeconds: number;
};
interface AudioPlayerProps extends ReactPlayerProps {
autoNext: () => void;
crossfadeDuration: number;
crossfadeStyle: CrossfadeStyle;
currentPlayer: 1 | 2;
muted: boolean;
playbackStyle: PlayerStyle;
player1?: Song;
player2?: Song;
status: PlayerStatus;
volume: number;
}
const getDuration = (ref: any) => {
return ref.current?.player?.player?.player?.duration;
};
// Credits: https://gist.github.com/novwhisky/8a1a0168b94f3b6abfaa?permalink_comment_id=1551393#gistcomment-1551393
// This is used so that the player will always have an <audio> element. This means that
// player1Source and player2Source are connected BEFORE the user presses play for
// the first time. This workaround is important for Safari, which seems to require the
// source to be connected PRIOR to resuming audio context
const EMPTY_SOURCE =
'data:audio/mp3;base64,SUQzBAAAAAAAI1RTU0UAAAAPAAADTGF2ZjU2LjM2LjEwMAAAAAAAAAAAAAAA//OEAAAAAAAAAAAAAAAAAAAAAAAASW5mbwAAAA8AAAAEAAABIADAwMDAwMDAwMDAwMDAwMDAwMDAwMDAwMDV1dXV1dXV1dXV1dXV1dXV1dXV1dXV1dXV6urq6urq6urq6urq6urq6urq6urq6urq6v////////////////////////////////8AAAAATGF2YzU2LjQxAAAAAAAAAAAAAAAAJAAAAAAAAAAAASDs90hvAAAAAAAAAAAAAAAAAAAA//MUZAAAAAGkAAAAAAAAA0gAAAAATEFN//MUZAMAAAGkAAAAAAAAA0gAAAAARTMu//MUZAYAAAGkAAAAAAAAA0gAAAAAOTku//MUZAkAAAGkAAAAAAAAA0gAAAAANVVV';
const useSongUrl = (
transcode: TranscodingConfig,
current: boolean,
song?: QueueSong,
): null | string => {
const prior = useRef(['', '']);
return useMemo(() => {
if (song?._serverId) {
// If we are the current track, we do not want a transcoding
// reconfiguration to force a restart.
if (current && prior.current[0] === song._uniqueId) {
return prior.current[1];
}
if (!transcode.enabled) {
// transcoding disabled; save the result
prior.current = [song._uniqueId, song.streamUrl];
return song.streamUrl;
}
const result = api.controller.getTranscodingUrl({
apiClientProps: {
serverId: song._serverId,
},
query: {
base: song.streamUrl,
...transcode,
},
})!;
// transcoding enabled; save the updated result
prior.current = [song._uniqueId, result];
return result;
}
// no track; clear result
prior.current = ['', ''];
return null;
}, [current, song?._uniqueId, song?._serverId, song?.streamUrl, transcode]);
};
export interface AudioPlayerRef {
player1: null | ReactPlayer;
player2: null | ReactPlayer;
}
export const AudioPlayer = forwardRef<AudioPlayerRef, AudioPlayerProps>((props, ref) => {
const {
autoNext,
crossfadeDuration,
crossfadeStyle,
currentPlayer,
muted,
playbackStyle,
player1,
player2,
status,
volume,
} = props;
const player1Ref = useRef<ReactPlayer>(null);
const player2Ref = useRef<ReactPlayer>(null);
const [isTransitioning, setIsTransitioning] = useState(false);
const audioDeviceId = useSettingsStore((state) => state.playback.audioDeviceId);
const playback = useSettingsStore((state) => state.playback.mpvProperties);
const shouldUseWebAudio = useSettingsStore((state) => state.playback.webAudio);
const preservesPitch = useSettingsStore((state) => state.playback.preservePitch);
const { resetSampleRate } = useSettingsStoreActions();
const playbackSpeed = usePlayerSpeed();
const { transcode } = usePlaybackSettings();
const stream1 = useSongUrl(transcode, currentPlayer === 1, player1);
const stream2 = useSongUrl(transcode, currentPlayer === 2, player2);
const { setWebAudio, webAudio } = useWebAudio();
const [player1Source, setPlayer1Source] = useState<MediaElementAudioSourceNode | null>(null);
const [player2Source, setPlayer2Source] = useState<MediaElementAudioSourceNode | null>(null);
const calculateReplayGain = useCallback(
(song: Song): number => {
if (playback.replayGainMode === 'no') {
return 1;
}
let gain: number | undefined;
let peak: number | undefined;
if (playback.replayGainMode === 'track') {
gain = song.gain?.track ?? song.gain?.album;
peak = song.peak?.track ?? song.peak?.album;
} else {
gain = song.gain?.album ?? song.gain?.track;
peak = song.peak?.album ?? song.peak?.track;
}
if (gain === undefined) {
gain = playback.replayGainFallbackDB;
if (!gain) {
return 1;
}
}
if (peak === undefined) {
peak = 1;
}
const preAmp = playback.replayGainPreampDB ?? 0;
// https://wiki.hydrogenaud.io/index.php?title=ReplayGain_1.0_specification&section=19
// Normalized to max gain
const expectedGain = 10 ** ((gain + preAmp) / 20);
if (playback.replayGainClip) {
return Math.min(expectedGain, 1 / peak);
}
return expectedGain;
},
[
playback.replayGainClip,
playback.replayGainFallbackDB,
playback.replayGainMode,
playback.replayGainPreampDB,
],
);
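A worked example of the gain formula above (illustrative numbers only):

// track gain = -6.5 dB, preAmp = 0 dB, peak = 0.95
// expectedGain = 10 ** ((-6.5 + 0) / 20) ≈ 0.473
// with replayGainClip: Math.min(0.473, 1 / 0.95 ≈ 1.053) → 0.473
// i.e. the GainNode runs at roughly 47% of the user volume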
useEffect(() => {
if (shouldUseWebAudio && 'AudioContext' in window) {
let context: AudioContext;
try {
context = new AudioContext({
latencyHint: 'playback',
sampleRate: playback.audioSampleRateHz || undefined,
});
} catch (error) {
// In practice, this should never be hit because the UI should validate
// the range. However, the actual supported range is not guaranteed
toast.error({ message: (error as Error).message });
context = new AudioContext({ latencyHint: 'playback' });
resetSampleRate();
}
const gain = context.createGain();
gain.connect(context.destination);
setWebAudio!({ context, gain });
return () => {
return context.close();
};
}
return () => {};
// Intentionally ignore the sample rate dependency, as it makes things really messy
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
useImperativeHandle(ref, () => ({
get player1() {
return player1Ref?.current;
},
get player2() {
return player2Ref?.current;
},
}));
const handleOnEnded = () => {
autoNext();
setIsTransitioning(false);
};
const handleOnError = (playerRef: React.RefObject<ReactPlayer>) => {
return ({ target }: ErrorEvent) => {
const { current: player } = playerRef;
if (!player || !(target instanceof Audio)) {
return;
}
const { error } = target;
console.log('Playback error occurred:', error);
if (
error?.code !== MediaError.MEDIA_ERR_DECODE &&
error?.code !== MediaError.MEDIA_ERR_SRC_NOT_SUPPORTED
) {
return;
}
handleOnEnded();
};
};
useEffect(() => {
if (status === PlayerStatus.PLAYING) {
if (currentPlayer === 1) {
// calling play() is not necessarily a safe option (https://developer.chrome.com/blog/play-request-was-interrupted)
// In practice, this failure is only likely to happen when using the 0-second wav:
// play() + play() in rapid succession will cause problems as the first one ends the track.
const internalPlayer = player1Ref.current?.getInternalPlayer();
if (internalPlayer) {
internalPlayer.preservesPitch = preservesPitch;
internalPlayer.play().catch(() => {});
}
} else {
const internalPlayer = player2Ref.current?.getInternalPlayer();
if (internalPlayer) {
internalPlayer.preservesPitch = preservesPitch;
internalPlayer.play().catch(() => {});
}
}
} else {
player1Ref.current?.getInternalPlayer()?.pause();
player2Ref.current?.getInternalPlayer()?.pause();
}
}, [currentPlayer, status, preservesPitch]);
const handleCrossfade1 = useCallback(
(e: AudioPlayerProgress) => {
return crossfadeHandler({
currentPlayer,
currentPlayerRef: player1Ref,
currentTime: e.playedSeconds,
duration: getDuration(player1Ref),
fadeDuration: crossfadeDuration,
fadeType: crossfadeStyle,
isTransitioning,
nextPlayerRef: player2Ref,
player: 1,
setIsTransitioning,
volume,
});
},
[crossfadeDuration, crossfadeStyle, currentPlayer, isTransitioning, volume],
);
const handleCrossfade2 = useCallback(
(e: AudioPlayerProgress) => {
return crossfadeHandler({
currentPlayer,
currentPlayerRef: player2Ref,
currentTime: e.playedSeconds,
duration: getDuration(player2Ref),
fadeDuration: crossfadeDuration,
fadeType: crossfadeStyle,
isTransitioning,
nextPlayerRef: player1Ref,
player: 2,
setIsTransitioning,
volume,
});
},
[crossfadeDuration, crossfadeStyle, currentPlayer, isTransitioning, volume],
);
const handleGapless1 = useCallback(
(e: AudioPlayerProgress) => {
return gaplessHandler({
currentTime: e.playedSeconds,
duration: getDuration(player1Ref),
isFlac: player1?.container === 'flac',
isTransitioning,
nextPlayerRef: player2Ref,
setIsTransitioning,
});
},
[isTransitioning, player1?.container],
);
const handleGapless2 = useCallback(
(e: AudioPlayerProgress) => {
return gaplessHandler({
currentTime: e.playedSeconds,
duration: getDuration(player2Ref),
isFlac: player2?.container === 'flac',
isTransitioning,
nextPlayerRef: player1Ref,
setIsTransitioning,
});
},
[isTransitioning, player2?.container],
);
useEffect(() => {
// Not standard, just used in Chromium-based browsers. See
// https://developer.chrome.com/blog/audiocontext-setsinkid/.
// If the isElectron() check is ever removed, fix this.
if (isElectron() && webAudio && 'setSinkId' in webAudio.context && audioDeviceId) {
const setSink = async () => {
try {
if (webAudio.context.state !== 'closed') {
await (webAudio.context as any).setSinkId(audioDeviceId);
}
} catch (error) {
toast.error({ message: `Error setting sink: ${(error as Error).message}` });
}
};
setSink();
}
}, [audioDeviceId, webAudio]);
useEffect(() => {
if (!webAudio) return;
const sources = [player1Source ? player1 : null, player2Source ? player2 : null];
const current = sources[currentPlayer - 1];
// Set the current replaygain
if (current) {
const newVolume = calculateReplayGain(current) * volume;
webAudio.gain.gain.setValueAtTime(Math.max(0, newVolume), 0);
}
// Set the next track replaygain right before the end of this track
// Attempt to prevent pop-in for web audio.
const next = sources[3 - currentPlayer];
if (next && current) {
const newVolume = calculateReplayGain(next) * volume;
webAudio.gain.gain.setValueAtTime(
Math.max(0, newVolume),
Math.max(0, (current.duration - 1) / 1000),
);
}
}, [
calculateReplayGain,
currentPlayer,
player1,
player1Source,
player2,
player2Source,
volume,
webAudio,
]);
const handlePlayer1Start = useCallback(
async (player: ReactPlayer) => {
if (!webAudio) return;
if (player1Source) {
// This should fire once, only if the source is real (meaning we
// saw the dummy source) and the context is not ready
if (webAudio.context.state !== 'running') {
await webAudio.context.resume();
}
return;
}
const internal = player.getInternalPlayer() as HTMLMediaElement | undefined;
if (internal) {
const { context, gain } = webAudio;
const source = context.createMediaElementSource(internal);
source.connect(gain);
setPlayer1Source(source);
}
},
[player1Source, webAudio],
);
const handlePlayer2Start = useCallback(
async (player: ReactPlayer) => {
if (!webAudio) return;
if (player2Source) {
if (webAudio.context.state !== 'running') {
await webAudio.context.resume();
}
return;
}
const internal = player.getInternalPlayer() as HTMLMediaElement | undefined;
if (internal) {
const { context, gain } = webAudio;
const source = context.createMediaElementSource(internal);
source.connect(gain);
setPlayer2Source(source);
}
},
[player2Source, webAudio],
);
// Bugfix for Safari: rather than use the `<audio>` volume (which doesn't work),
// use the GainNode to scale the volume. In this case, for compatibility with
// other browsers, set the `<audio>` volume to 1
return (
<>
<ReactPlayer
config={{
file: { attributes: { crossOrigin: 'anonymous' }, forceAudio: true },
}}
height={0}
muted={muted}
// If there is no stream url, we do not need to handle when the audio finishes
onEnded={stream1 ? handleOnEnded : undefined}
onError={handleOnError(player1Ref)}
onProgress={
playbackStyle === PlayerStyle.GAPLESS ? handleGapless1 : handleCrossfade1
}
onReady={handlePlayer1Start}
playbackRate={playbackSpeed}
playing={currentPlayer === 1 && status === PlayerStatus.PLAYING}
progressInterval={isTransitioning ? 10 : 250}
ref={player1Ref}
url={stream1 || EMPTY_SOURCE}
volume={webAudio ? 1 : volume}
width={0}
/>
<ReactPlayer
config={{
file: { attributes: { crossOrigin: 'anonymous' }, forceAudio: true },
}}
height={0}
muted={muted}
onEnded={stream2 ? handleOnEnded : undefined}
onError={handleOnError(player2Ref)}
onProgress={
playbackStyle === PlayerStyle.GAPLESS ? handleGapless2 : handleCrossfade2
}
onReady={handlePlayer2Start}
playbackRate={playbackSpeed}
playing={currentPlayer === 2 && status === PlayerStatus.PLAYING}
progressInterval={isTransitioning ? 10 : 250}
ref={player2Ref}
url={stream2 || EMPTY_SOURCE}
volume={webAudio ? 1 : volume}
width={0}
/>
</>
);
});
@@ -5,7 +5,9 @@ import { MpvPlayerEngine, MpvPlayerEngineHandle } from './engine/mpv-player-engi
import { useMainPlayerListener } from '/@/renderer/features/player/audio-player/hooks/use-main-player-listener';
import { usePlayerEvents } from '/@/renderer/features/player/audio-player/hooks/use-player-events';
import { useSongUrl } from '/@/renderer/features/player/audio-player/hooks/use-stream-url';
import {
usePlaybackSettings,
usePlayerActions,
usePlayerData,
usePlayerMuted,
@@ -26,6 +28,7 @@ export function MpvPlayer() {
const { speed } = usePlayerProperties();
const isMuted = usePlayerMuted();
const volume = usePlayerVolume();
const { transcode } = usePlaybackSettings();
const [localPlayerStatus, setLocalPlayerStatus] = useState<PlayerStatus>(status);
const [isTransitioning, setIsTransitioning] = useState(false);
@@ -132,12 +135,15 @@ export function MpvPlayer() {
useMainPlayerListener();
const currentUrl = useSongUrl(currentSong, true, transcode);
const nextUrl = useSongUrl(nextSong, false, transcode);
return (
<MpvPlayerEngine
- currentSrc={currentSong?.streamUrl}
+ currentSrc={currentUrl}
isMuted={isMuted}
isTransitioning={isTransitioning}
- nextSrc={nextSong?.streamUrl}
+ nextSrc={nextUrl}
onEnded={handleOnEnded}
onProgress={onProgress}
playerRef={playerRef}
@@ -9,8 +9,10 @@ import {
} from '/@/renderer/features/player/audio-player/engine/wavesurfer-player-engine';
import { useMainPlayerListener } from '/@/renderer/features/player/audio-player/hooks/use-main-player-listener';
import { usePlayerEvents } from '/@/renderer/features/player/audio-player/hooks/use-player-events';
import { useSongUrl } from '/@/renderer/features/player/audio-player/hooks/use-stream-url';
import { PlayerOnProgressProps } from '/@/renderer/features/player/audio-player/types';
import {
usePlaybackSettings,
usePlayerActions,
usePlayerData,
usePlayerMuted,
@@ -29,6 +31,7 @@ export function WaveSurferPlayer() {
const { crossfadeDuration, speed, transitionType } = usePlayerProperties();
const isMuted = usePlayerMuted();
const volume = usePlayerVolume();
const { transcode } = usePlaybackSettings();
const [localPlayerStatus, setLocalPlayerStatus] = useState<PlayerStatus>(status);
const [isTransitioning, setIsTransitioning] = useState<boolean | string>(false);
@@ -229,6 +232,9 @@ export function WaveSurferPlayer() {
useMainPlayerListener();
const player1Url = useSongUrl(player1, num === 1, transcode);
const player2Url = useSongUrl(player2, num === 2, transcode);
return (
<WaveSurferPlayerEngine
isMuted={isMuted}
@@ -241,8 +247,8 @@ export function WaveSurferPlayer() {
playerRef={playerRef}
playerStatus={localPlayerStatus}
speed={speed}
- src1={player1?.streamUrl}
- src2={player2?.streamUrl}
+ src1={player1Url}
+ src2={player2Url}
volume={volume}
/>
);
@@ -9,8 +9,10 @@ import {
} from '/@/renderer/features/player/audio-player/engine/web-player-engine';
import { useMainPlayerListener } from '/@/renderer/features/player/audio-player/hooks/use-main-player-listener';
import { usePlayerEvents } from '/@/renderer/features/player/audio-player/hooks/use-player-events';
import { useSongUrl } from '/@/renderer/features/player/audio-player/hooks/use-stream-url';
import { PlayerOnProgressProps } from '/@/renderer/features/player/audio-player/types';
import {
usePlaybackSettings,
usePlayerActions,
usePlayerData,
usePlayerMuted,
@@ -29,6 +31,7 @@ export function WebPlayer() {
const { crossfadeDuration, crossfadeStyle, speed, transitionType } = usePlayerProperties();
const isMuted = usePlayerMuted();
const volume = usePlayerVolume();
const { transcode } = usePlaybackSettings();
const [localPlayerStatus, setLocalPlayerStatus] = useState<PlayerStatus>(status);
const [isTransitioning, setIsTransitioning] = useState<boolean | string>(false);
@@ -261,6 +264,9 @@ export function WebPlayer() {
useMainPlayerListener();
const player1Url = useSongUrl(player1, num === 1, transcode);
const player2Url = useSongUrl(player2, num === 2, transcode);
return (
<WebPlayerEngine
isMuted={isMuted}
@@ -273,8 +279,8 @@ export function WebPlayer() {
playerRef={playerRef}
playerStatus={localPlayerStatus}
speed={speed}
- src1={player1?.streamUrl}
- src2={player2?.streamUrl}
+ src1={player1Url}
+ src2={player2Url}
volume={volume}
/>
);
@@ -6,7 +6,7 @@ import { useEffect, useMemo, useRef, useState } from 'react';
import { CustomPlayerbarSlider } from './playerbar-slider';
import styles from './playerbar-waveform.module.css';
- import { api } from '/@/renderer/api';
+ import { useSongUrl } from '/@/renderer/features/player/audio-player/hooks/use-stream-url';
import { usePlayer } from '/@/renderer/features/player/context/player-context';
import {
BarAlign,
@@ -37,26 +37,7 @@ export const PlayerbarWaveform = () => {
const songDuration = currentSong?.duration ? currentSong.duration / 1000 : 0;
// Get the stream URL with transcoding support
- const streamUrl = useMemo(() => {
- if (!currentSong?._serverId || !currentSong?.streamUrl) {
- return null;
- }
- if (!transcode.enabled) {
- return currentSong.streamUrl;
- }
- return api.controller.getTranscodingUrl({
- apiClientProps: {
- serverId: currentSong._serverId,
- },
- query: {
- base: currentSong.streamUrl,
- ...transcode,
- },
- });
- }, [currentSong, transcode]);
+ const streamUrl = useSongUrl(currentSong, true, transcode);
const primaryColor = usePrimaryColor();
@@ -13,30 +13,6 @@ import {
} from '/@/shared/types/domain-types';
import { ServerListItem, ServerType } from '/@/shared/types/types';
const getStreamUrl = (args: {
container?: string;
deviceId: string;
eTag?: string;
id: string;
mediaSourceId?: string;
server: null | ServerListItem;
}) => {
const { deviceId, id, server } = args;
return (
`${server?.url}/audio` +
`/${id}/universal` +
`?userId=${server?.userId}` +
`&deviceId=${deviceId}` +
'&audioCodec=aac' +
`&apiKey=${server?.credential}` +
`&playSessionId=${deviceId}` +
'&container=opus,mp3,aac,m4a,m4b,flac,wav,ogg' +
'&transcodingContainer=ts' +
'&transcodingProtocol=http'
);
};
const getAlbumArtistCoverArtUrl = (args: {
baseUrl: string;
item: z.infer<typeof jfType._response.albumArtist>;
@@ -182,7 +158,6 @@ const getTags = (item: AlbumOrSong): null | Record<string, string[]> => {
const normalizeSong = (
item: z.infer<typeof jfType._response.song>,
server: null | ServerListItem,
deviceId: string,
imageSize?: number,
): Song => {
let bitRate = 0;
@@ -191,7 +166,6 @@ const normalizeSong = (
let path: null | string = null;
let sampleRate: null | number = null;
let size = 0;
let streamUrl = '';
if (item.MediaSources?.length) {
const source = item.MediaSources[0];
@@ -200,15 +174,6 @@ const normalizeSong = (
path = source.Path;
size = source.Size;
streamUrl = getStreamUrl({
container: container,
deviceId,
eTag: source.ETag,
id: item.Id,
mediaSourceId: source.Id,
server,
});
if ((source.MediaStreams?.length || 0) > 0) {
for (const stream of source.MediaStreams) {
if (stream.Type === 'Audio') {
@@ -296,7 +261,6 @@ const normalizeSong = (
releaseYear: item.ProductionYear || null,
sampleRate,
size,
streamUrl,
tags: getTags(item),
trackNumber: item.IndexNumber,
updatedAt: item.DateCreated,
@@ -361,7 +325,7 @@ const normalizeAlbum = (
releaseYear: item.ProductionYear || null,
size: null,
songCount: item?.ChildCount || null,
- songs: item.Songs?.map((song) => normalizeSong(song, server, '', imageSize)),
+ songs: item.Songs?.map((song) => normalizeSong(song, server, imageSize)),
tags: getTags(item),
updatedAt: item?.DateLastMediaAdded || item.DateCreated,
userFavorite: item.UserData?.IsFavorite || false,
@@ -136,8 +136,6 @@ const normalizeSong = (
size: size || 300,
}) || null;
const streamUrl = `${server?.url}/rest/stream.view?id=${item.id}&v=1.13.0&c=Feishin&${server?.credential}`;
return {
_itemType: LibraryItem.SONG,
_serverId: server?.id || 'unknown',
@@ -194,7 +192,6 @@ const normalizeSong = (
releaseYear: item.year || null,
sampleRate: item.samplingRate || null,
size: item.size,
streamUrl,
tags: null,
trackNumber: item.track || 1,
updatedAt: '',
@@ -370,7 +370,6 @@ export type Song = {
releaseYear: null | number;
sampleRate: null | number;
size: number;
streamUrl: string;
tags: null | Record<string, string[]>;
trackNumber: number;
updatedAt: string;
@@ -1224,10 +1223,10 @@ export type ControllerEndpoint = {
getSongDetail: (args: SongDetailArgs) => Promise<SongDetailResponse>;
getSongList: (args: SongListArgs) => Promise<SongListResponse>;
getSongListCount: (args: SongListCountArgs) => Promise<number>;
getStreamUrl: (args: StreamArgs) => string;
getStructuredLyrics?: (args: StructuredLyricsArgs) => Promise<StructuredLyric[]>;
getTags?: (args: TagArgs) => Promise<TagResponses>;
getTopSongs: (args: TopSongListArgs) => Promise<TopSongListResponse>;
getTranscodingUrl: (args: TranscodingArgs) => string;
getUserList?: (args: UserListArgs) => Promise<UserListResponse>;
movePlaylistItem?: (args: MoveItemArgs) => Promise<void>;
removeFromPlaylist: (args: RemoveFromPlaylistArgs) => Promise<RemoveFromPlaylistResponse>;
@@ -1314,12 +1313,12 @@ export type InternalControllerEndpoint = {
getSongDetail: (args: ReplaceApiClientProps<SongDetailArgs>) => Promise<SongDetailResponse>;
getSongList: (args: ReplaceApiClientProps<SongListArgs>) => Promise<SongListResponse>;
getSongListCount: (args: ReplaceApiClientProps<SongListCountArgs>) => Promise<number>;
getStreamUrl: (args: ReplaceApiClientProps<StreamArgs>) => string;
getStructuredLyrics?: (
args: ReplaceApiClientProps<StructuredLyricsArgs>,
) => Promise<StructuredLyric[]>;
getTags?: (args: ReplaceApiClientProps<TagArgs>) => Promise<TagResponses>;
getTopSongs: (args: ReplaceApiClientProps<TopSongListArgs>) => Promise<TopSongListResponse>;
getTranscodingUrl: (args: ReplaceApiClientProps<TranscodingArgs>) => string;
getUserList?: (args: ReplaceApiClientProps<UserListArgs>) => Promise<UserListResponse>;
movePlaylistItem?: (args: ReplaceApiClientProps<MoveItemArgs>) => Promise<void>;
removeFromPlaylist: (
@@ -1380,6 +1379,17 @@ export type SimilarSongsQuery = {
songId: string;
};
export type StreamArgs = BaseEndpointArgs & {
query: StreamQuery;
};
export type StreamQuery = {
bitrate?: number;
format?: string;
id: string;
transcode: boolean;
};
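A minimal StreamQuery literal for reference (values are placeholders):

const query: StreamQuery = {
    bitrate: 256, // kbps; the Jellyfin endpoint scales this by 1000
    format: 'opus',
    id: 'song-id',
    transcode: true,
};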
export type StructuredLyric = (StructuredSyncedLyric | StructuredUnsyncedLyric) & {
lang: string;
};
@@ -1417,16 +1427,6 @@ export type TagResponses = {
enumTags?: Tag[];
};
export type TranscodingArgs = BaseEndpointArgs & {
query: TranscodingQuery;
};
export type TranscodingQuery = {
base: string;
bitrate?: number;
format?: string;
};
type BaseEndpointArgsWithServer = {
apiClientProps: {
server: null | ServerListItemWithCredential;