Fixes to voice notes playback

Alvaro 2023-02-28 06:07:40 -07:00 committed by GitHub
parent fad0529080
commit 3d4248e070
9 changed files with 285 additions and 274 deletions

View file

@@ -18,7 +18,8 @@ export type Props = Readonly<{
   i18n: LocalizerType;
   title: string;
   currentTime: number;
-  duration: number;
+  // not available until audio has loaded
+  duration: number | undefined;
   playbackRate: number;
   state: PlayerState;
   onPlay: () => void;
@@ -91,11 +92,13 @@ export function MiniPlayer({
       <div className="MiniPlayer__state">
         <Emojify text={title} />
         <span className="MiniPlayer__middot">&middot;</span>
-        <span>
-          {durationToPlaybackText(
-            state === PlayerState.loading ? duration : currentTime
-          )}
-        </span>
+        {duration !== undefined && (
+          <span>
+            {durationToPlaybackText(
+              state === PlayerState.loading ? duration : currentTime
+            )}
+          </span>
+        )}
       </div>

       <PlaybackRateButton

View file

@@ -155,11 +155,14 @@ export async function computePeaks(
   const pending = inProgressMap.get(computeKey);
   if (pending) {
-    log.info('GlobalAudioContext: already computing peaks for', computeKey);
+    log.info(
+      'VoiceNotesPlaybackContext: already computing peaks for',
+      computeKey
+    );
     return pending;
   }

-  log.info('GlobalAudioContext: queue computing peaks for', computeKey);
+  log.info('VoiceNotesPlaybackContext: queue computing peaks for', computeKey);

   const promise = computeQueue.add(() => doComputePeaks(url, barCount));

   inProgressMap.set(computeKey, promise);
@@ -178,10 +181,7 @@ export const VoiceNotesPlaybackContext =
   React.createContext<Contents>(globalContents);

 export type VoiceNotesPlaybackProps = {
-  conversationId: string | undefined;
-  isPaused: boolean;
   children?: React.ReactNode | React.ReactChildren;
-  unloadMessageAudio: () => void;
 };

 /**

View file

@@ -51,7 +51,7 @@ export type OwnProps = Readonly<{

 export type DispatchProps = Readonly<{
   pushPanelForConversation: PushPanelForConversationActionType;
-  setCurrentTime: (currentTime: number) => void;
+  setPosition: (positionAsRatio: number) => void;
   setPlaybackRate: (rate: number) => void;
   setIsPlaying: (value: boolean) => void;
 }>;
@@ -226,7 +226,7 @@ export function MessageAudio(props: Props): JSX.Element {
     setPlaybackRate,
     onPlayMessage,
     pushPanelForConversation,
-    setCurrentTime,
+    setPosition,
     setIsPlaying,
   } = props;
@@ -239,11 +239,7 @@ export function MessageAudio(props: Props): JSX.Element {
   // if it's playing, use the duration passed as props as it might
   // change during loading/playback (?)
   // NOTE: Avoid division by zero
-  const activeDuration =
-    active?.duration && !Number.isNaN(active.duration)
-      ? active.duration
-      : undefined;
-  const [duration, setDuration] = useState(activeDuration ?? 1e-23);
+  const [duration, setDuration] = useState(active?.duration ?? 1e-23);

   const [hasPeaks, setHasPeaks] = useState(false);
   const [peaks, setPeaks] = useState<ReadonlyArray<number>>(
@@ -353,6 +349,14 @@ export function MessageAudio(props: Props): JSX.Element {
       progress = 0;
     }

+    if (active) {
+      setPosition(progress);
+      if (!active.playing) {
+        setIsPlaying(true);
+      }
+      return;
+    }
+
     if (attachment.url) {
       onPlayMessage(id, progress);
     } else {
@@ -385,12 +389,10 @@ export function MessageAudio(props: Props): JSX.Element {
       return;
     }

-    setCurrentTime(
-      Math.min(
-        Number.isNaN(duration) ? Infinity : duration,
-        Math.max(0, active.currentTime + increment)
-      )
-    );
+    const currentPosition = active.currentTime / duration;
+    const positionIncrement = increment / duration;
+
+    setPosition(currentPosition + positionIncrement);

     if (!isPlaying) {
       toggleIsPlaying();
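
A worked example of the ratio arithmetic in the seek handler above (an illustrative sketch, not code from this commit): a skip expressed in seconds is divided by the clip duration so it can be handed to setPosition, which now takes a 0..1 ratio rather than seconds.

// Illustrative sketch: converting a seek of `increment` seconds into the
// position ratio that setPosition expects. Assumes `duration` is a positive
// number of seconds (the component substitutes a tiny epsilon until the real
// duration is known).
function seekIncrementAsRatio(
  currentTime: number,
  increment: number,
  duration: number
): number {
  const currentPosition = currentTime / duration; // e.g. 10 / 60 ≈ 0.167
  const positionIncrement = increment / duration; // e.g. 5 / 60 ≈ 0.083
  return currentPosition + positionIncrement; // new ratio ≈ 0.25
}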

View file

@@ -189,9 +189,9 @@ function MessageAudioContainer({
     setPlaying(value);
   };

-  const setCurrentTimeAction = (value: number) => {
-    audio.currentTime = value;
-    setCurrentTime(currentTime);
+  const setPosition = (value: number) => {
+    audio.currentTime = value * audio.duration;
+    setCurrentTime(audio.currentTime);
   };

   const active = isActive
@@ -203,11 +203,10 @@
       {...props}
       active={active}
       computePeaks={computePeaks}
-      id="storybook"
       onPlayMessage={handlePlayMessage}
       played={_played}
       pushPanelForConversation={action('pushPanelForConversation')}
-      setCurrentTime={setCurrentTimeAction}
+      setPosition={setPosition}
       setIsPlaying={setIsPlayingAction}
       setPlaybackRate={setPlaybackRateAction}
     />

View file

@@ -9,11 +9,16 @@ import { noop } from 'lodash';
  */
 class GlobalMessageAudio {
   #audio: HTMLAudioElement = new Audio();
+  #url: string | undefined;
+
+  // true immediately after play() is called, even if still loading
+  #playing = false;

   #onLoadedMetadata = noop;
   #onTimeUpdate = noop;
   #onEnded = noop;
   #onDurationChange = noop;
+  #onError = noop;

   constructor() {
     // callbacks must be wrapped by function (not attached directly)
@@ -29,40 +34,46 @@
   }

   load({
-    src,
+    url,
     playbackRate,
     onLoadedMetadata,
     onTimeUpdate,
     onDurationChange,
     onEnded,
+    onError,
   }: {
-    src: string;
+    url: string;
     playbackRate: number;
     onLoadedMetadata: () => void;
     onTimeUpdate: () => void;
     onDurationChange: () => void;
     onEnded: () => void;
+    onError: (error: unknown) => void;
   }) {
-    this.#audio.pause();
-    this.#audio.currentTime = 0;
+    this.#url = url;

     // update callbacks
     this.#onLoadedMetadata = onLoadedMetadata;
     this.#onTimeUpdate = onTimeUpdate;
     this.#onDurationChange = onDurationChange;
     this.#onEnded = onEnded;
+    this.#onError = onError;

     // changing src resets the playback rate
-    this.#audio.src = src;
+    this.#audio.src = this.#url;
     this.#audio.playbackRate = playbackRate;
   }

-  play(): Promise<void> {
-    return this.#audio.play();
+  play(): void {
+    this.#playing = true;
+    this.#audio.play().catch(error => {
+      this.#onError(error);
+    });
   }

   pause(): void {
     this.#audio.pause();
+    this.#playing = false;
   }

   get playbackRate() {
@@ -73,6 +84,14 @@
     this.#audio.playbackRate = rate;
   }

+  get playing() {
+    return this.#playing;
+  }
+
+  get url() {
+    return this.#url;
+  }
+
   get duration() {
     return this.#audio.duration;
   }
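
A hedged usage sketch of the reworked wrapper above: only the load/play/pause surface and the playing, url, currentTime, and duration accessors come from the class; the URL and handler bodies are placeholders, and the import assumes the singleton export used elsewhere in this commit.

// Sketch only; the callback bodies and the URL are placeholders.
import { globalMessageAudio } from '../../services/globalMessageAudio';

globalMessageAudio.load({
  url: 'https://example.invalid/voice-note.aac', // placeholder
  playbackRate: 1,
  onLoadedMetadata: () => console.log('metadata loaded'),
  onTimeUpdate: () => console.log('time', globalMessageAudio.currentTime),
  onDurationChange: () => console.log('duration', globalMessageAudio.duration),
  onEnded: () => console.log('ended'),
  onError: error => console.error('playback failed', error),
});

// play() is now fire-and-forget; rejections are routed to onError
globalMessageAudio.play();
console.log(globalMessageAudio.playing); // true immediately, even while loading
globalMessageAudio.pause();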

View file

@@ -5,10 +5,9 @@ import type { ThunkAction } from 'redux-thunk';
 import type { ReadonlyDeep } from 'type-fest';
 import type { BoundActionCreatorsMapObject } from '../../hooks/useBoundActions';
 import { useBoundActions } from '../../hooks/useBoundActions';
-import { Sound } from '../../util/Sound';
 import type { StateType as RootStateType } from '../reducer';
-import { setVoiceNotePlaybackRate, markViewed } from './conversations';
+import { setVoiceNotePlaybackRate } from './conversations';
 import { extractVoiceNoteForPlayback } from '../selectors/audioPlayer';
 import type {
   VoiceNoteAndConsecutiveForPlayback,
@@ -23,14 +22,9 @@
   ConversationChangedActionType,
 } from './conversations';
 import * as log from '../../logging/log';
-import * as Errors from '../../types/errors';
-import { strictAssert } from '../../util/assert';
-import { globalMessageAudio } from '../../services/globalMessageAudio';
-import { getUserConversationId } from '../selectors/user';
 import { isAudio } from '../../types/Attachment';
 import { getAttachmentUrlForPath } from '../selectors/message';
-import { SeenStatus } from '../../MessageSeenStatus';
+import { assertDev } from '../../util/assert';

 // State
@@ -44,15 +38,15 @@ export type AudioPlayerContent = ReadonlyDeep<{
   // false on the first of a consecutive group
   isConsecutive: boolean;
   ourConversationId: string | undefined;
-  startPosition: number;
 }>;

 export type ActiveAudioPlayerStateType = ReadonlyDeep<{
   playing: boolean;
   currentTime: number;
   playbackRate: number;
-  duration: number;
-  content: AudioPlayerContent | undefined;
+  duration: number | undefined; // never zero or NaN
+  startPosition: number;
+  content: AudioPlayerContent;
 }>;

 export type AudioPlayerStateType = ReadonlyDeep<{
@@ -94,18 +88,18 @@ type CurrentTimeUpdated = ReadonlyDeep<{
   payload: number;
 }>;

+type SetPosition = ReadonlyDeep<{
+  type: 'audioPlayer/SET_POSITION';
+  payload: number;
+}>;
+
 type MessageAudioEnded = ReadonlyDeep<{
   type: 'audioPlayer/MESSAGE_AUDIO_ENDED';
 }>;

 type DurationChanged = ReadonlyDeep<{
   type: 'audioPlayer/DURATION_CHANGED';
-  payload: number;
-}>;
-
-type UpdateQueueAction = ReadonlyDeep<{
-  type: 'audioPlayer/UPDATE_QUEUE';
-  payload: ReadonlyArray<VoiceNoteForPlayback>;
+  payload: number | undefined;
 }>;

 type AudioPlayerActionType = ReadonlyDeep<
@@ -115,33 +109,58 @@ type AudioPlayerActionType = ReadonlyDeep<
   | MessageAudioEnded
   | CurrentTimeUpdated
   | DurationChanged
-  | UpdateQueueAction
+  | SetPosition
 >;

 // Action Creators

 export const actions = {
   loadMessageAudio,
-  playMessageAudio,
   setPlaybackRate,
-  setCurrentTime,
+  currentTimeUpdated,
+  durationChanged,
   setIsPlaying,
+  setPosition,
   pauseVoiceNotePlayer,
   unloadMessageAudio,
+  messageAudioEnded,
 };

+function messageAudioEnded(): MessageAudioEnded {
+  return {
+    type: 'audioPlayer/MESSAGE_AUDIO_ENDED',
+  };
+}
+
+function durationChanged(value: number | undefined): DurationChanged {
+  assertDev(
+    !Number.isNaN(value) && (value === undefined || value > 0),
+    `Duration must be > 0 if defined, got ${value}`
+  );
+  return {
+    type: 'audioPlayer/DURATION_CHANGED',
+    payload: value,
+  };
+}
+
 export const useAudioPlayerActions = (): BoundActionCreatorsMapObject<
   typeof actions
 > => useBoundActions(actions);

-function setCurrentTime(value: number): CurrentTimeUpdated {
-  globalMessageAudio.currentTime = value;
+function currentTimeUpdated(value: number): CurrentTimeUpdated {
   return {
     type: 'audioPlayer/CURRENT_TIME_UPDATED',
     payload: value,
   };
 }

+function setPosition(positionAsRatio: number): SetPosition {
+  return {
+    type: 'audioPlayer/SET_POSITION',
+    payload: positionAsRatio,
+  };
+}
+
 function setPlaybackRate(
   rate: number
 ): ThunkAction<
@@ -153,13 +172,10 @@ function setPlaybackRate(
   return (dispatch, getState) => {
     const { audioPlayer } = getState();
     const { active } = audioPlayer;
-    if (!active?.content) {
+    if (!active) {
       log.warn('audioPlayer.setPlaybackRate: No active message audio');
       return;
     }
-
-    globalMessageAudio.playbackRate = rate;
-
     dispatch({
       type: 'audioPlayer/SET_PLAYBACK_RATE',
       payload: rate,
@@ -176,117 +192,6 @@ function setPlaybackRate(
   };
 }

-const stateChangeConfirmUpSound = new Sound({
-  src: 'sounds/state-change_confirm-up.ogg',
-});
-const stateChangeConfirmDownSound = new Sound({
-  src: 'sounds/state-change_confirm-down.ogg',
-});
-
-/** plays a message that has been loaded into content */
-function playMessageAudio(
-  playConsecutiveSound: boolean
-): ThunkAction<
-  void,
-  RootStateType,
-  unknown,
-  CurrentTimeUpdated | SetIsPlayingAction | DurationChanged | MessageAudioEnded
-> {
-  return (dispatch, getState) => {
-    const ourConversationId = getUserConversationId(getState());
-    if (!ourConversationId) {
-      log.error('playMessageAudio: No ourConversationId');
-      return;
-    }
-
-    const { audioPlayer } = getState();
-    const { active } = audioPlayer;
-
-    if (!active) {
-      log.error('playMessageAudio: Not active');
-      return;
-    }
-    const { content } = active;
-
-    if (!content) {
-      log.error('playMessageAudio: No message audio loaded');
-      return;
-    }
-    const { current } = content;
-
-    if (!current.url) {
-      log.error('playMessageAudio: pending download');
-      return;
-    }
-
-    if (playConsecutiveSound) {
-      void stateChangeConfirmUpSound.play();
-    }
-
-    // set source to new message and start playing
-    globalMessageAudio.load({
-      src: current.url,
-      playbackRate: active.playbackRate,
-
-      onTimeUpdate: () => {
-        dispatch({
-          type: 'audioPlayer/CURRENT_TIME_UPDATED',
-          payload: globalMessageAudio.currentTime,
-        });
-      },
-
-      onLoadedMetadata: () => {
-        strictAssert(
-          !Number.isNaN(globalMessageAudio.duration),
-          'Audio should have definite duration on `loadedmetadata` event'
-        );
-        log.info('playMessageAudio: `loadedmetadata` event', current.id);
-        dispatch(
-          setCurrentTime(content.startPosition * globalMessageAudio.duration)
-        );
-        dispatch(setIsPlaying(true));
-      },
-
-      onDurationChange: () => {
-        log.info('playMessageAudio: `durationchange` event', current.id);
-        if (!Number.isNaN(globalMessageAudio.duration)) {
-          dispatch({
-            type: 'audioPlayer/DURATION_CHANGED',
-            payload: Math.max(globalMessageAudio.duration, 1e-23),
-          });
-        }
-      },
-
-      onEnded: () => {
-        const { audioPlayer: innerAudioPlayer } = getState();
-        const { active: innerActive } = innerAudioPlayer;
-        if (
-          innerActive?.content?.isConsecutive &&
-          innerActive.content?.queue.length === 0
-        ) {
-          void stateChangeConfirmDownSound.play();
-        }
-        dispatch({ type: 'audioPlayer/MESSAGE_AUDIO_ENDED' });
-      },
-    });
-
-    if (!current.isPlayed) {
-      const message = getState().conversations.messagesLookup[current.id];
-      if (message && message.seenStatus !== SeenStatus.Unseen) {
-        markViewed(current.id);
-      }
-    } else {
-      log.info('audioPlayer.loadMessageAudio: message already played', {
-        message: current.messageIdForLogging,
-      });
-    }
-  };
-}
-
 /**
  * Load message audio into the "content", the smart MiniPlayer will then play it
  */
@@ -324,32 +229,10 @@ function loadMessageAudio({
   };
 }

-export function setIsPlaying(
-  value: boolean
-): ThunkAction<
-  void,
-  RootStateType,
-  unknown,
-  SetMessageAudioAction | SetIsPlayingAction
-> {
-  return (dispatch, getState) => {
-    if (!value) {
-      globalMessageAudio.pause();
-    } else {
-      const { audioPlayer } = getState();
-      globalMessageAudio.play().catch(error => {
-        log.error(
-          'MessageAudio: resume error',
-          audioPlayer.active?.content?.current.id,
-          Errors.toLogFormat(error)
-        );
-        dispatch(unloadMessageAudio());
-      });
-    }
-
-    dispatch({
-      type: 'audioPlayer/SET_IS_PLAYING',
-      payload: value,
-    });
-  };
+function setIsPlaying(value: boolean): SetIsPlayingAction {
+  return {
+    type: 'audioPlayer/SET_IS_PLAYING',
+    payload: value,
+  };
 }
@@ -362,7 +245,6 @@ export function pauseVoiceNotePlayer(): ReturnType<typeof setIsPlaying> {
 }

 export function unloadMessageAudio(): SetMessageAudioAction {
-  globalMessageAudio.pause();
   return {
     type: 'audioPlayer/SET_MESSAGE_AUDIO',
     payload: undefined,
@@ -392,15 +274,17 @@ export function reducer(

     return {
       ...state,
-      active: {
-        // defaults
-        playing: false,
-        currentTime: 0,
-        duration: 0,
-        ...active,
-        playbackRate: payload?.playbackRate ?? 1,
-        content: payload,
-      },
+      active:
+        payload === undefined
+          ? undefined
+          : {
+              currentTime: 0,
+              duration: undefined,
+              playing: true,
+              playbackRate: payload.playbackRate,
+              content: payload,
+              startPosition: payload.startPosition,
+            },
     };
   }
@@ -443,6 +327,19 @@ export function reducer(
     };
   }

+  if (action.type === 'audioPlayer/SET_POSITION') {
+    if (!active) {
+      return state;
+    }
+    return {
+      ...state,
+      active: {
+        ...active,
+        startPosition: action.payload,
+      },
+    };
+  }
+
   if (action.type === 'audioPlayer/SET_PLAYBACK_RATE') {
     if (!active) {
       return state;
@@ -548,12 +445,12 @@ export function reducer(
       ...state,
       active: {
         ...active,
+        startPosition: 0,
         content: {
           ...content,
           current: nextVoiceNote,
           queue: newQueue,
           isConsecutive: true,
-          startPosition: 0,
         },
       },
     };
@@ -561,10 +458,7 @@
     return {
       ...state,
-      active: {
-        ...active,
-        content: undefined,
-      },
+      active: undefined,
     };
   }
@@ -581,10 +475,6 @@
     }

     const { content } = active;
-    if (!content) {
-      return state;
-    }
-
     // if we deleted the message currently being played
     // move on to the next message
     if (content.current.id === id) {
@@ -593,10 +483,7 @@
     if (!next) {
       return {
         ...state,
-        active: {
-          ...active,
-          content: undefined,
-        },
+        active: undefined,
       };
     }
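
A self-contained sketch (stand-in types, not the real state shape) of the position-as-ratio convention the duck switches to above: SET_POSITION stores a 0..1 ratio in startPosition, and a consumer multiplies by the duration, once it is known, to get a seek target in seconds.

// Stand-in types for illustration only.
type ActiveSketch = {
  startPosition: number; // 0..1 ratio, written by SET_POSITION
  duration: number | undefined; // undefined until the audio element reports it
};

function applySetPosition(active: ActiveSketch, ratio: number): ActiveSketch {
  return { ...active, startPosition: ratio };
}

function seekTargetSeconds(active: ActiveSketch): number | undefined {
  return active.duration === undefined
    ? undefined // cannot seek yet; wait for a durationchange
    : active.startPosition * active.duration;
}

const active = applySetPosition({ startPosition: 0, duration: 120 }, 0.25);
console.log(seekTargetSeconds(active)); // 30 seconds into a 2-minute note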

View file

@@ -24,7 +24,7 @@ export function SmartMessageAudio({
   ...props
 }: Props): JSX.Element | null {
   const active = useSelector(selectAudioPlayerActive);
-  const { loadMessageAudio, setIsPlaying, setPlaybackRate, setCurrentTime } =
+  const { loadMessageAudio, setIsPlaying, setPlaybackRate, setPosition } =
     useAudioPlayerActions();
   const { pushPanelForConversation } = useConversationsActions();
@@ -71,7 +71,7 @@
       onPlayMessage={handlePlayMessage}
       setPlaybackRate={setPlaybackRate}
       setIsPlaying={setIsPlaying}
-      setCurrentTime={setCurrentTime}
+      setPosition={setPosition}
       pushPanelForConversation={pushPanelForConversation}
       {...props}
     />

View file

@@ -1,10 +1,9 @@
 // Copyright 2022 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only

-import React, { useCallback, useEffect } from 'react';
+import React, { useCallback } from 'react';
 import { useSelector } from 'react-redux';
 import { MiniPlayer, PlayerState } from '../../components/MiniPlayer';
-import { usePrevious } from '../../hooks/usePrevious';
 import { useAudioPlayerActions } from '../ducks/audioPlayer';
 import {
   selectAudioPlayerActive,
@@ -22,42 +21,12 @@ export function SmartMiniPlayer(): JSX.Element | null {
   const i18n = useSelector(getIntl);
   const active = useSelector(selectAudioPlayerActive);
   const getVoiceNoteTitle = useSelector(selectVoiceNoteTitle);
-  const {
-    setIsPlaying,
-    setPlaybackRate,
-    unloadMessageAudio,
-    playMessageAudio,
-  } = useAudioPlayerActions();
+  const { setIsPlaying, setPlaybackRate, unloadMessageAudio } =
+    useAudioPlayerActions();

   const handlePlay = useCallback(() => setIsPlaying(true), [setIsPlaying]);
   const handlePause = useCallback(() => setIsPlaying(false), [setIsPlaying]);

-  const previousContent = usePrevious(undefined, active?.content);
-
-  useEffect(() => {
-    if (!active) {
-      return;
-    }
-    const { content } = active;
-
-    // if no content, stop playing
-    if (!content) {
-      if (active.playing) {
-        setIsPlaying(false);
-      }
-      return;
-    }
-
-    // if the content changed, play the new content
-    if (content.current.id !== previousContent?.current.id) {
-      playMessageAudio(content.isConsecutive);
-    }
-
-    // if the start position changed, play at new position
-    if (content.startPosition !== previousContent?.startPosition) {
-      playMessageAudio(false);
-    }
-  });
-
-  if (!active?.content) {
+  if (!active) {
     return null;
   }

View file

@@ -1,22 +1,154 @@
 // Copyright 2021 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only

-import { connect } from 'react-redux';
-import { mapDispatchToProps } from '../actions';
+import React, { useEffect } from 'react';
+import { useSelector } from 'react-redux';
+import type { VoiceNotesPlaybackProps } from '../../components/VoiceNotesPlaybackContext';
 import { VoiceNotesPlaybackProvider } from '../../components/VoiceNotesPlaybackContext';
-import type { StateType } from '../reducer';
-import { getSelectedConversationId } from '../selectors/conversations';
-import { isPaused } from '../selectors/audioPlayer';
+import { selectAudioPlayerActive } from '../selectors/audioPlayer';
+import { useAudioPlayerActions } from '../ducks/audioPlayer';
+import { globalMessageAudio } from '../../services/globalMessageAudio';
+import { strictAssert } from '../../util/assert';
+import * as log from '../../logging/log';
+import { Sound } from '../../util/Sound';
+import { getConversations } from '../selectors/conversations';
+import { SeenStatus } from '../../MessageSeenStatus';
+import { markViewed } from '../ducks/conversations';
+import * as Errors from '../../types/errors';
+import { usePrevious } from '../../hooks/usePrevious';

-const mapStateToProps = (state: StateType) => {
-  return {
-    conversationId: getSelectedConversationId(state),
-    isPaused: isPaused(state),
-  };
-};
+const stateChangeConfirmUpSound = new Sound({
+  src: 'sounds/state-change_confirm-up.ogg',
+});
+const stateChangeConfirmDownSound = new Sound({
+  src: 'sounds/state-change_confirm-down.ogg',
+});

-const smart = connect(mapStateToProps, mapDispatchToProps);
-
-export const SmartVoiceNotesPlaybackProvider = smart(
-  VoiceNotesPlaybackProvider
-);
+/**
+ * Synchronizes the audioPlayer redux state with globalMessageAudio
+ */
+export function SmartVoiceNotesPlaybackProvider(
+  props: VoiceNotesPlaybackProps
+): JSX.Element | null {
+  const active = useSelector(selectAudioPlayerActive);
+  const conversations = useSelector(getConversations);
+
+  const previousStartPosition = usePrevious(undefined, active?.startPosition);
+
+  const content = active?.content;
+  const current = content?.current;
+  const url = current?.url;
+
+  const {
+    messageAudioEnded,
+    currentTimeUpdated,
+    durationChanged,
+    unloadMessageAudio,
+  } = useAudioPlayerActions();
+
+  useEffect(() => {
+    // if we don't have a new audio source
+    // just control playback
+    if (!content || !current || !url || url === globalMessageAudio.url) {
+      if (!active?.playing && globalMessageAudio.playing) {
+        globalMessageAudio.pause();
+      }
+
+      if (active?.playing && !globalMessageAudio.playing) {
+        globalMessageAudio.play();
+      }
+
+      if (active && active.playbackRate !== globalMessageAudio.playbackRate) {
+        globalMessageAudio.playbackRate = active.playbackRate;
+      }
+
+      if (
+        active &&
+        active.startPosition !== undefined &&
+        active.startPosition !== previousStartPosition
+      ) {
+        globalMessageAudio.currentTime =
+          active.startPosition * globalMessageAudio.duration;
+      }
+      return;
+    }
+
+    // otherwise we have a new audio source
+    // we just load it and play it
+    globalMessageAudio.load({
+      url,
+      playbackRate: active.playbackRate,
+      onLoadedMetadata() {
+        strictAssert(
+          !Number.isNaN(globalMessageAudio.duration),
+          'Audio should have definite duration on `loadedmetadata` event'
+        );
+        log.info(
+          'SmartVoiceNotesPlaybackProvider: `loadedmetadata` event',
+          current.id
+        );
+        if (active.startPosition !== 0) {
+          globalMessageAudio.currentTime =
+            active.startPosition * globalMessageAudio.duration;
+        }
+      },
+      onDurationChange() {
+        log.info(
+          'SmartVoiceNotesPlaybackProvider: `durationchange` event',
+          current.id
+        );
+        const reportedDuration = globalMessageAudio.duration;
+        // the underlying Audio element can return NaN if the audio hasn't loaded
+        // we filter out 0 or NaN as they are not useful values downstream
+        const newDuration =
+          Number.isNaN(reportedDuration) || reportedDuration === 0
+            ? undefined
+            : reportedDuration;
+        durationChanged(newDuration);
+      },
+      onTimeUpdate() {
+        currentTimeUpdated(globalMessageAudio.currentTime);
+      },
+      onEnded() {
+        if (content.isConsecutive && content.queue.length === 0) {
+          void stateChangeConfirmDownSound.play();
+        }
+        messageAudioEnded();
+      },
+      onError(error) {
+        log.error(
+          'SmartVoiceNotesPlaybackProvider: playback error',
+          current.messageIdForLogging,
+          Errors.toLogFormat(error)
+        );
+        unloadMessageAudio();
+      },
+    });
+
+    // if this message was part of the queue (consecutive, added indirectly)
+    // we play a note to let the user know we're onto a new message
+    // (false for the first message in a consecutive group, since the user initiated it)
+    if (content.isConsecutive) {
+      // eslint-disable-next-line more/no-then
+      void stateChangeConfirmUpSound.play().then(() => {
+        globalMessageAudio.play();
+      });
+    } else {
+      globalMessageAudio.play();
+    }
+
+    if (!current.isPlayed) {
+      const message = conversations.messagesLookup[current.id];
+      if (message && message.seenStatus !== SeenStatus.Unseen) {
+        markViewed(current.id);
+      }
+    } else {
+      log.info('SmartVoiceNotesPlaybackProvider: message already played', {
+        message: current.messageIdForLogging,
+      });
+    }
+  });
+
+  return <VoiceNotesPlaybackProvider {...props} />;
+}
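
The provider's seek handling relies on comparing startPosition against its value from the previous render, so a seek is applied only once per change. A stand-in sketch of that previous-value pattern (the repository ships its own usePrevious hook; this is only an illustration of the idea):

// Illustration only; not the repository's usePrevious implementation.
import { useRef } from 'react';

function usePreviousSketch<T>(initial: T, value: T): T {
  const ref = useRef(initial);
  const previous = ref.current;
  ref.current = value; // remember the current value for the next render
  return previous; // the value seen on the previous render
}

Because the effect runs on every render, acting only when startPosition differs from the previous render's value is what turns a position update in the store into a single seek on the audio element.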