Voice notes mini-player

Alvaro 2023-02-24 16:18:57 -07:00 committed by GitHub
parent b5849f872a
commit 0e655ceeed
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
45 changed files with 1599 additions and 487 deletions

View file

@ -0,0 +1,107 @@
// Copyright 2022 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
@mixin audio-icon($name, $icon, $color) {
&--#{$name}::before {
@include color-svg('../images/icons/v2/#{$icon}.svg', $color, false);
-webkit-mask-size: 100%;
}
}
@mixin all-audio-icons($color) {
@include audio-icon(play, play-solid-20, $color);
@include audio-icon(pause, pause-solid-20, $color);
@include audio-icon(pending, audio-spinner-arc-22, $color);
}
.MiniPlayer {
position: absolute;
top: 0;
left: 0;
right: 0;
z-index: calc($z-index-above-above-base + 1);
display: flex;
align-items: center;
gap: 18px;
padding: 8px 16px;
@include light-theme {
background-color: $color-gray-02;
}
@include dark-theme {
background-color: $color-gray-75;
}
&__playback-button {
@include button-reset;
&::before {
display: block;
width: 14px;
height: 14px;
content: '';
}
@include light-theme {
@include all-audio-icons($color-gray-60);
}
@include dark-theme {
@include all-audio-icons($color-gray-15);
}
&--pending::before {
animation: rotate 1000ms linear infinite;
}
}
&__state {
flex: 1;
}
&__middot {
padding: 0 5px;
}
&__close-button {
@include button-reset;
border-radius: 4px;
height: 10px;
width: 10px;
&::before {
content: '';
display: block;
width: 100%;
height: 100%;
@include light-theme {
@include color-svg('../images/icons/v2/x-8.svg', $color-gray-75);
}
@include dark-theme {
@include color-svg('../images/icons/v2/x-8.svg', $color-gray-15);
}
}
@include light-theme {
&:hover,
&:focus {
background: $color-gray-02;
}
&:active {
background: $color-gray-05;
}
}
@include dark-theme {
&:hover,
&:focus {
background: $color-gray-80;
}
&:active {
background: $color-gray-75;
}
}
}
}

View file

@ -0,0 +1,68 @@
// Copyright 2022 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
.PlaybackRateButton {
@include button-reset;
@include font-body-2-bold;
width: 38px;
height: 18px;
text-align: center;
font-weight: 700;
border-radius: 4px;
font-size: 11px;
padding: 1px 2px;
margin: -2px 0;
line-height: 16px;
letter-spacing: 0.05px;
user-select: none;
&--message-incoming {
@include light-theme {
color: $color-gray-60;
background: $color-black-alpha-08;
}
@include dark-theme {
color: $color-gray-25;
background: $color-white-alpha-08;
}
}
&--message-outgoing {
color: $color-white-alpha-80;
background: $color-white-alpha-20;
}
&--mini-player {
@include light-theme {
color: $color-gray-60;
background: $color-black-alpha-08;
}
@include dark-theme {
color: $color-gray-25;
background: $color-white-alpha-08;
}
}
&::after {
content: '';
display: inline-block;
width: 8px;
height: 8px;
margin-left: 2px;
@mixin x-icon($color) {
@include color-svg('../images/icons/v2/x-8.svg', $color, false);
}
@include light-theme {
@include x-icon($color-gray-60);
}
@include dark-theme {
@include x-icon($color-gray-25);
}
.module-message__audio-attachment--outgoing & {
@include x-icon($color-white-alpha-80);
}
}
}

View file

@ -95,10 +95,12 @@
@import './components/MessageAudio.scss';
@import './components/MessageBody.scss';
@import './components/MessageDetail.scss';
@import './components/MiniPlayer.scss';
@import './components/Modal.scss';
@import './components/MyStories.scss';
@import './components/OutgoingGiftBadgeModal.scss';
@import './components/PermissionsPopup.scss';
@import './components/PlaybackRateButton.scss';
@import './components/Preferences.scss';
@import './components/ProfileEditor.scss';
@import './components/Quote.scss';

View file

@ -42,5 +42,6 @@ global.WebSocket = {};
/* eslint max-classes-per-file: ["error", 2] */
global.AudioContext = class {};
global.Audio = class {
pause() {}
addEventListener() {}
};

View file

@ -34,6 +34,7 @@ export function AvatarLightbox({
media={[]}
saveAttachment={noop}
toggleForwardMessageModal={noop}
onMediaPlaybackStart={noop}
>
<AvatarPreview
avatarColor={avatarColor}

View file

@ -117,6 +117,7 @@ const createProps = (storyProps: Partial<PropsType> = {}): PropsType => ({
toggleSettings: action('toggle-settings'),
toggleSpeakerView: action('toggle-speaker-view'),
isConversationTooBigToRing: false,
pauseVoiceNotePlayer: action('pause-audio-player'),
});
export default {

View file

@ -100,6 +100,7 @@ export type PropsType = {
toggleSettings: () => void;
toggleSpeakerView: () => void;
isConversationTooBigToRing: boolean;
pauseVoiceNotePlayer: () => void;
};
type ActiveCallManagerPropsType = PropsType & {
@ -138,6 +139,7 @@ function ActiveCallManager({
toggleScreenRecordingPermissionsDialog,
toggleSettings,
toggleSpeakerView,
pauseVoiceNotePlayer,
}: ActiveCallManagerPropsType): JSX.Element {
const {
conversation,
@ -157,6 +159,9 @@ function ActiveCallManager({
}, [cancelCall, conversation.id]);
const joinActiveCall = useCallback(() => {
// pause any voice note playback
pauseVoiceNotePlayer();
startCall({
callMode: activeCall.callMode,
conversationId: conversation.id,
@ -169,6 +174,7 @@ function ActiveCallManager({
conversation.id,
hasLocalAudio,
hasLocalVideo,
pauseVoiceNotePlayer,
]);
const getGroupCallVideoFrameSourceForActiveCall = useCallback(

View file

@ -5,6 +5,7 @@ import * as React from 'react';
import { action } from '@storybook/addon-actions';
import { number } from '@storybook/addon-knobs';
import { noop } from 'lodash';
import enMessages from '../../_locales/en/messages.json';
import type { PropsType } from './Lightbox';
@ -62,6 +63,7 @@ const createProps = (overrideProps: Partial<PropsType> = {}): PropsType => ({
saveAttachment: action('saveAttachment'),
selectedIndex: number('selectedIndex', overrideProps.selectedIndex || 0),
toggleForwardMessageModal: action('toggleForwardMessageModal'),
onMediaPlaybackStart: noop,
});
export function Multimedia(): JSX.Element {

View file

@ -34,6 +34,7 @@ export type PropsType = {
saveAttachment: SaveAttachmentActionCreatorType;
selectedIndex?: number;
toggleForwardMessageModal: (messageId: string) => unknown;
onMediaPlaybackStart: () => void;
};
const ZOOM_SCALE = 3;
@ -60,6 +61,7 @@ export function Lightbox({
saveAttachment,
selectedIndex: initialSelectedIndex = 0,
toggleForwardMessageModal,
onMediaPlaybackStart,
}: PropsType): JSX.Element | null {
const [root, setRoot] = React.useState<HTMLElement | undefined>();
const [selectedIndex, setSelectedIndex] =
@ -204,11 +206,12 @@ export function Lightbox({
}
if (videoElement.paused) {
onMediaPlaybackStart();
void videoElement.play();
} else {
videoElement.pause();
}
}, [videoElement]);
}, [videoElement, onMediaPlaybackStart]);
useEffect(() => {
const div = document.createElement('div');

View file

@ -0,0 +1,96 @@
// Copyright 2022 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import React, { useEffect, useState } from 'react';
import { MiniPlayer, PlayerState } from './MiniPlayer';
import { setupI18n } from '../util/setupI18n';
import enMessages from '../../_locales/en/messages.json';
const i18n = setupI18n('en', enMessages);
const audio = new Audio();
audio.src = '/fixtures/incompetech-com-Agnus-Dei-X.mp3';
export default {
title: 'components/MiniPlayer',
component: MiniPlayer,
};
export function Basic(): JSX.Element {
const [active, setActive] = useState(false);
const [playerState, setPlayerState] = useState(PlayerState.loading);
const [playbackRate, setPlaybackRate] = useState(1);
const [currentTime, setCurrentTime] = useState(0);
const activate = () => {
setActive(true);
setTimeout(() => {
setPlayerState(PlayerState.playing);
void audio.play();
}, 2000);
};
const deactivate = () => {
setActive(false);
setPlayerState(PlayerState.loading);
audio.pause();
audio.currentTime = 0;
};
useEffect(() => {
const handleUpdateTime = () => {
setCurrentTime(audio.currentTime);
};
const handleEnded = () => {
deactivate();
};
audio.addEventListener('timeupdate', handleUpdateTime);
audio.addEventListener('ended', handleEnded);
return () => {
audio.removeEventListener('timeupdate', handleUpdateTime);
audio.removeEventListener('ended', handleEnded);
};
}, [setCurrentTime]);
const playAction = () => {
setPlayerState(PlayerState.playing);
void audio.play();
};
const pauseAction = () => {
setPlayerState(PlayerState.paused);
audio.pause();
};
const setPlaybackRateAction = (rate: number) => {
setPlaybackRate(rate);
audio.playbackRate = rate;
};
return (
<>
{active && (
<MiniPlayer
title="Paige Hall 😉"
i18n={i18n}
onPlay={playAction}
onPause={pauseAction}
onPlaybackRate={setPlaybackRateAction}
state={playerState}
currentTime={currentTime}
duration={Number.isFinite(audio.duration) ? audio.duration : 0}
playbackRate={playbackRate}
onClose={deactivate}
/>
)}
{!active && (
<button type="button" onClick={activate}>
Activate
</button>
)}
</>
);
}

View file

@ -0,0 +1,117 @@
// Copyright 2022 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import classNames from 'classnames';
import React, { useCallback } from 'react';
import type { LocalizerType } from '../types/Util';
import { durationToPlaybackText } from '../util/durationToPlaybackText';
import { Emojify } from './conversation/Emojify';
import { PlaybackRateButton } from './PlaybackRateButton';
export enum PlayerState {
loading = 'loading',
playing = 'playing',
paused = 'paused',
}
export type Props = Readonly<{
i18n: LocalizerType;
title: string;
currentTime: number;
duration: number;
playbackRate: number;
state: PlayerState;
onPlay: () => void;
onPause: () => void;
onPlaybackRate: (rate: number) => void;
onClose: () => void;
}>;
export function MiniPlayer({
i18n,
title,
state,
currentTime,
duration,
playbackRate,
onPlay,
onPause,
onPlaybackRate,
onClose,
}: Props): JSX.Element {
const updatePlaybackRate = useCallback(() => {
onPlaybackRate(PlaybackRateButton.nextPlaybackRate(playbackRate));
}, [playbackRate, onPlaybackRate]);
const handleClick = useCallback(() => {
switch (state) {
case PlayerState.playing:
onPause();
break;
case PlayerState.paused:
onPlay();
break;
case PlayerState.loading:
break;
default:
throw new TypeError(`Missing case: ${state}`);
}
}, [state, onPause, onPlay]);
let label: string | undefined;
switch (state) {
case PlayerState.playing:
label = i18n('MessageAudio--pause');
break;
case PlayerState.paused:
label = i18n('MessageAudio--play');
break;
case PlayerState.loading:
label = i18n('MessageAudio--pending');
break;
default:
throw new TypeError(`Missing case ${state}`);
}
return (
<div className="MiniPlayer">
<button
type="button"
className={classNames(
'MiniPlayer__playback-button',
state === 'playing' && 'MiniPlayer__playback-button--pause',
state === 'paused' && 'MiniPlayer__playback-button--play',
state === 'loading' && 'MiniPlayer__playback-button--pending'
)}
onClick={handleClick}
aria-label={label}
disabled={state === PlayerState.loading}
/>
<div className="MiniPlayer__state">
<Emojify text={title} />
<span className="MiniPlayer__middot">&middot;</span>
<span>
{durationToPlaybackText(
state === PlayerState.loading ? duration : currentTime
)}
</span>
</div>
<PlaybackRateButton
i18n={i18n}
variant="mini-player"
playbackRate={playbackRate}
onClick={updatePlaybackRate}
visible={state === 'playing'}
/>
<button
type="button"
className="MiniPlayer__close-button"
onClick={onClose}
aria-label={i18n('close')}
/>
</div>
);
}

View file

@ -26,6 +26,7 @@ export type PropsType = {
onDelete: (story: StoryViewType) => unknown;
onForward: (storyId: string) => unknown;
onSave: (story: StoryViewType) => unknown;
onMediaPlaybackStart: () => void;
queueStoryDownload: (storyId: string) => unknown;
retryMessageSend: (messageId: string) => unknown;
viewStory: ViewStoryActionCreatorType;
@ -43,6 +44,7 @@ export function MyStories({
retryMessageSend,
viewStory,
hasViewReceiptSetting,
onMediaPlaybackStart,
}: PropsType): JSX.Element {
const [confirmDeleteStory, setConfirmDeleteStory] = useState<
StoryViewType | undefined
@ -94,6 +96,7 @@ export function MyStories({
key={story.messageId}
onForward={onForward}
onSave={onSave}
onMediaPlaybackStart={onMediaPlaybackStart}
queueStoryDownload={queueStoryDownload}
retryMessageSend={retryMessageSend}
setConfirmDeleteStory={setConfirmDeleteStory}
@ -122,6 +125,7 @@ type StorySentPropsType = Pick<
| 'queueStoryDownload'
| 'retryMessageSend'
| 'viewStory'
| 'onMediaPlaybackStart'
> & {
setConfirmDeleteStory: (_: StoryViewType | undefined) => unknown;
story: StoryViewType;
@ -132,6 +136,7 @@ function StorySent({
i18n,
onForward,
onSave,
onMediaPlaybackStart,
queueStoryDownload,
retryMessageSend,
setConfirmDeleteStory,
@ -177,6 +182,7 @@ function StorySent({
moduleClassName="StoryListItem__previews--image"
queueStoryDownload={queueStoryDownload}
storyId={story.messageId}
onMediaPlaybackStart={onMediaPlaybackStart}
/>
</div>
<div className="MyStories__story__details">

View file

@ -21,6 +21,7 @@ export type PropsType = {
myStories: Array<MyStoryType>;
onAddStory: () => unknown;
onClick: () => unknown;
onMediaPlaybackStart: () => void;
queueStoryDownload: (storyId: string) => unknown;
showToast: ShowToastActionCreatorType;
};
@ -35,6 +36,7 @@ export function MyStoryButton({
myStories,
onAddStory,
onClick,
onMediaPlaybackStart,
queueStoryDownload,
showToast,
}: PropsType): JSX.Element {
@ -190,6 +192,7 @@ export function MyStoryButton({
moduleClassName="StoryListItem__previews--image"
queueStoryDownload={queueStoryDownload}
storyId={newestStory.messageId}
onMediaPlaybackStart={onMediaPlaybackStart}
/>
</div>
</div>

View file

@ -0,0 +1,107 @@
// Copyright 2022 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import classNames from 'classnames';
import React, { useCallback, useState } from 'react';
import { animated, useSpring } from '@react-spring/web';
import type { LocalizerType } from '../types/Util';
const SPRING_CONFIG = {
mass: 0.5,
tension: 350,
friction: 20,
velocity: 0.01,
};
type Props = {
// undefined if not playing
playbackRate: number | undefined;
variant: 'message-outgoing' | 'message-incoming' | 'mini-player';
onClick: () => void;
visible?: boolean;
i18n: LocalizerType;
};
export function PlaybackRateButton({
playbackRate,
variant,
visible = true,
i18n,
onClick,
}: Props): JSX.Element {
const [isDown, setIsDown] = useState(false);
const [animProps] = useSpring(
{
config: SPRING_CONFIG,
to: isDown ? { scale: 1.3 } : { scale: visible ? 1 : 0 },
},
[visible, isDown]
);
// Clicking button toggles playback
const onButtonClick = useCallback(
(event: React.MouseEvent) => {
event.stopPropagation();
event.preventDefault();
onClick();
},
[onClick]
);
// Keyboard playback toggle
const onButtonKeyDown = useCallback(
(event: React.KeyboardEvent) => {
if (event.key !== 'Enter' && event.key !== 'Space') {
return;
}
event.stopPropagation();
event.preventDefault();
onClick();
},
[onClick]
);
const playbackRateLabels: { [key: number]: string } = {
1: i18n('MessageAudio--playbackRate1'),
1.5: i18n('MessageAudio--playbackRate1p5'),
2: i18n('MessageAudio--playbackRate2'),
0.5: i18n('MessageAudio--playbackRatep5'),
};
const label = playbackRate
? playbackRateLabels[playbackRate].toString()
: undefined;
return (
<animated.div style={animProps}>
<button
type="button"
className={classNames(
'PlaybackRateButton',
`PlaybackRateButton--${variant}`
)}
onClick={onButtonClick}
onKeyDown={onButtonKeyDown}
onMouseDown={() => setIsDown(true)}
onMouseUp={() => setIsDown(false)}
onMouseLeave={() => setIsDown(false)}
aria-label={label}
tabIndex={0}
>
{label}
</button>
</animated.div>
);
}
const playbackRates = [1, 1.5, 2, 0.5];
PlaybackRateButton.nextPlaybackRate = (currentRate: number): number => {
// cycle through the rates
return playbackRates[
(playbackRates.indexOf(currentRate) + 1) % playbackRates.length
];
};
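
PlaybackRateButton.nextPlaybackRate cycles through the playbackRates array in order and wraps around; both MiniPlayer and MessageAudio call it on click. A minimal sketch of the resulting sequence, using only the component defined above (the relative import path is an assumption):

import { PlaybackRateButton } from './PlaybackRateButton';

// Repeated clicks starting from the default rate of 1 walk the array and wrap:
// 1 -> 1.5 -> 2 -> 0.5 -> 1 -> ...
let rate = 1;
const sequence: Array<number> = [];
for (let i = 0; i < 5; i += 1) {
  sequence.push(rate);
  rate = PlaybackRateButton.nextPlaybackRate(rate);
}
// sequence === [1, 1.5, 2, 0.5, 1]
// An unknown rate (indexOf === -1) lands on index 0, i.e. falls back to 1.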

View file

@ -72,6 +72,7 @@ export type PropsType = {
string,
number
>;
onMediaPlaybackStart: () => void;
} & Pick<
StoriesSettingsModalPropsType,
| 'onHideMyStoriesFrom'
@ -138,6 +139,7 @@ export function SendStoryModal({
toggleGroupsForStorySend,
mostRecentActiveStoryTimestampByGroupOrDistributionList,
toggleSignalConnectionsModal,
onMediaPlaybackStart,
}: PropsType): JSX.Element {
const [page, setPage] = useState<PageType>(Page.SendStory);
@ -875,6 +877,7 @@ export function SendStoryModal({
label="label"
moduleClassName="SendStoryModal__story"
attachment={attachment}
onMediaPlaybackStart={onMediaPlaybackStart}
/>
</div>
<div className="SendStoryModal__top-bar">

View file

@ -39,6 +39,7 @@ export type PropsType = {
myStories: Array<MyStoryType>;
onForwardStory: (storyId: string) => unknown;
onSaveStory: (story: StoryViewType) => unknown;
onMediaPlaybackStart: () => void;
preferredWidthFromStorage: number;
queueStoryDownload: (storyId: string) => unknown;
renderStoryCreator: () => JSX.Element;
@ -67,6 +68,7 @@ export function Stories({
myStories,
onForwardStory,
onSaveStory,
onMediaPlaybackStart,
preferredWidthFromStorage,
queueStoryDownload,
renderStoryCreator,
@ -110,6 +112,7 @@ export function Stories({
onDelete={deleteStoryForEveryone}
onForward={onForwardStory}
onSave={onSaveStory}
onMediaPlaybackStart={onMediaPlaybackStart}
queueStoryDownload={queueStoryDownload}
retryMessageSend={retryMessageSend}
viewStory={viewStory}
@ -134,6 +137,7 @@ export function Stories({
}
}}
onStoriesSettings={showStoriesSettings}
onMediaPlaybackStart={onMediaPlaybackStart}
queueStoryDownload={queueStoryDownload}
showConversation={showConversation}
showToast={showToast}

View file

@ -64,6 +64,7 @@ export type PropsType = {
onAddStory: (file?: File) => unknown;
onMyStoriesClicked: () => unknown;
onStoriesSettings: () => unknown;
onMediaPlaybackStart: () => void;
queueStoryDownload: (storyId: string) => unknown;
showConversation: ShowConversationType;
showToast: ShowToastActionCreatorType;
@ -82,6 +83,7 @@ export function StoriesPane({
onAddStory,
onMyStoriesClicked,
onStoriesSettings,
onMediaPlaybackStart,
queueStoryDownload,
showConversation,
showToast,
@ -159,6 +161,7 @@ export function StoriesPane({
onClick={onMyStoriesClicked}
queueStoryDownload={queueStoryDownload}
showToast={showToast}
onMediaPlaybackStart={onMediaPlaybackStart}
/>
{renderedStories.map(story => (
<StoryListItem
@ -174,6 +177,7 @@ export function StoriesPane({
toggleStoriesView();
}}
onHideStory={toggleHideStories}
onMediaPlaybackStart={onMediaPlaybackStart}
queueStoryDownload={queueStoryDownload}
story={story.storyView}
viewUserStories={viewUserStories}
@ -204,6 +208,7 @@ export function StoriesPane({
toggleStoriesView();
}}
onHideStory={toggleHideStories}
onMediaPlaybackStart={onMediaPlaybackStart}
queueStoryDownload={queueStoryDownload}
story={story.storyView}
viewUserStories={viewUserStories}

View file

@ -71,6 +71,7 @@ export type PropsType = {
| 'toggleGroupsForStorySend'
| 'mostRecentActiveStoryTimestampByGroupOrDistributionList'
| 'toggleSignalConnectionsModal'
| 'onMediaPlaybackStart'
> &
Pick<
TextStoryCreatorPropsType,
@ -104,6 +105,7 @@ export function StoryCreator({
onSetSkinTone,
onUseEmoji,
onViewersUpdated,
onMediaPlaybackStart,
ourConversationId,
processAttachment,
recentEmojis,
@ -194,6 +196,7 @@ export function StoryCreator({
setDraftAttachment(undefined);
}}
onViewersUpdated={onViewersUpdated}
onMediaPlaybackStart={onMediaPlaybackStart}
setMyStoriesToAllSignalConnections={
setMyStoriesToAllSignalConnections
}

View file

@ -4,6 +4,7 @@
import React from 'react';
import { v4 as uuid } from 'uuid';
import { action } from '@storybook/addon-actions';
import { noop } from 'lodash';
import type { PropsType } from './StoryImage';
import { StoryImage } from './StoryImage';
@ -32,6 +33,7 @@ function getDefaultProps(): PropsType {
label: 'A story',
queueStoryDownload: action('queueStoryDownload'),
storyId: uuid(),
onMediaPlaybackStart: noop,
};
}

View file

@ -35,6 +35,7 @@ export type PropsType = {
readonly moduleClassName?: string;
readonly queueStoryDownload: (storyId: string) => unknown;
readonly storyId: string;
readonly onMediaPlaybackStart: () => void;
};
export function StoryImage({
@ -50,6 +51,7 @@ export function StoryImage({
moduleClassName,
queueStoryDownload,
storyId,
onMediaPlaybackStart,
}: PropsType): JSX.Element | null {
const shouldDownloadAttachment =
(!isDownloaded(attachment) && !isDownloading(attachment)) ||
@ -72,9 +74,10 @@ export function StoryImage({
if (isPaused) {
videoRef.current.pause();
} else {
onMediaPlaybackStart();
void videoRef.current.play();
}
}, [isPaused]);
}, [isPaused, onMediaPlaybackStart]);
useEffect(() => {
setHasImgError(false);

View file

@ -28,6 +28,7 @@ export type PropsType = Pick<ConversationStoryType, 'group' | 'isHidden'> & {
onGoToConversation: (conversationId: string) => unknown;
onHideStory: (conversationId: string) => unknown;
queueStoryDownload: (storyId: string) => unknown;
onMediaPlaybackStart: () => void;
story: StoryViewType;
viewUserStories: ViewUserStoriesActionCreatorType;
};
@ -88,6 +89,7 @@ export function StoryListItem({
isHidden,
onGoToConversation,
onHideStory,
onMediaPlaybackStart,
queueStoryDownload,
story,
viewUserStories,
@ -195,6 +197,7 @@ export function StoryListItem({
moduleClassName="StoryListItem__previews--image"
queueStoryDownload={queueStoryDownload}
storyId={story.messageId}
onMediaPlaybackStart={onMediaPlaybackStart}
/>
</div>
</ContextMenu>

View file

@ -96,6 +96,7 @@ export type PropsType = {
story: StoryViewType
) => unknown;
onUseEmoji: (_: EmojiPickDataType) => unknown;
onMediaPlaybackStart: () => void;
preferredReactionEmoji: ReadonlyArray<string>;
queueStoryDownload: (storyId: string) => unknown;
recentEmojis?: ReadonlyArray<string>;
@ -148,6 +149,7 @@ export function StoryViewer({
onSetSkinTone,
onTextTooLong,
onUseEmoji,
onMediaPlaybackStart,
preferredReactionEmoji,
queueStoryDownload,
recentEmojis,
@ -618,6 +620,7 @@ export function StoryViewer({
moduleClassName="StoryViewer__story"
queueStoryDownload={queueStoryDownload}
storyId={messageId}
onMediaPlaybackStart={onMediaPlaybackStart}
>
{reactionEmoji && (
<div className="StoryViewer__animated-emojis">

View file

@ -14,7 +14,7 @@ const MAX_AUDIO_DURATION = 15 * 60; // 15 minutes
export type ComputePeaksResult = {
duration: number;
peaks: ReadonlyArray<number>;
peaks: ReadonlyArray<number>; // 0 < peak < 1
};
export type Contents = {
@ -174,9 +174,10 @@ const globalContents: Contents = {
computePeaks,
};
export const GlobalAudioContext = React.createContext<Contents>(globalContents);
export const VoiceNotesPlaybackContext =
React.createContext<Contents>(globalContents);
export type GlobalAudioProps = {
export type VoiceNotesPlaybackProps = {
conversationId: string | undefined;
isPaused: boolean;
children?: React.ReactNode | React.ReactChildren;
@ -187,21 +188,12 @@ export type GlobalAudioProps = {
* A global context that holds Audio, AudioContext, LRU instances that are used
* inside the conversation by ts/components/conversation/MessageAudio.tsx
*/
export function GlobalAudioProvider({
conversationId,
export function VoiceNotesPlaybackProvider({
children,
unloadMessageAudio,
}: GlobalAudioProps): JSX.Element {
// When moving between conversations - stop audio
React.useEffect(() => {
return () => {
unloadMessageAudio();
};
}, [conversationId, unloadMessageAudio]);
}: VoiceNotesPlaybackProps): JSX.Element {
return (
<GlobalAudioContext.Provider value={globalContents}>
<VoiceNotesPlaybackContext.Provider value={globalContents}>
{children}
</GlobalAudioContext.Provider>
</VoiceNotesPlaybackContext.Provider>
);
}

View file

@ -2,6 +2,7 @@
// SPDX-License-Identifier: AGPL-3.0-only
import React from 'react';
import { SmartMiniPlayer } from '../../state/smart/MiniPlayer';
export type PropsType = {
conversationId: string;
@ -86,6 +87,7 @@ export function ConversationView({
{renderConversationHeader()}
</div>
<div className="ConversationView__pane main panel">
<SmartMiniPlayer />
<div className="ConversationView__timeline--container">
<div aria-live="polite" className="ConversationView__timeline">
{renderTimeline()}

View file

@ -2,7 +2,7 @@
// SPDX-License-Identifier: AGPL-3.0-only
import React, { useCallback, useRef, useEffect, useState } from 'react';
import type { RefObject, ReactNode } from 'react';
import type { RefObject } from 'react';
import classNames from 'classnames';
import { noop } from 'lodash';
import { animated, useSpring } from '@react-spring/web';
@ -13,15 +13,21 @@ import type { PushPanelForConversationActionType } from '../../state/ducks/conve
import { isDownloaded } from '../../types/Attachment';
import type { DirectionType, MessageStatusType } from './Message';
import type { ComputePeaksResult } from '../GlobalAudioContext';
import type { ComputePeaksResult } from '../VoiceNotesPlaybackContext';
import { MessageMetadata } from './MessageMetadata';
import * as log from '../../logging/log';
import type { ActiveAudioPlayerStateType } from '../../state/ducks/audioPlayer';
import { PlaybackRateButton } from '../PlaybackRateButton';
import { durationToPlaybackText } from '../../util/durationToPlaybackText';
export type OwnProps = Readonly<{
active: ActiveAudioPlayerStateType | undefined;
active:
| Pick<
ActiveAudioPlayerStateType,
'currentTime' | 'duration' | 'playing' | 'playbackRate'
>
| undefined;
buttonRef: RefObject<HTMLButtonElement>;
renderingContext: string;
i18n: LocalizerType;
attachment: AttachmentType;
collapseMetadata: boolean;
@ -33,7 +39,6 @@ export type OwnProps = Readonly<{
expirationLength?: number;
expirationTimestamp?: number;
id: string;
conversationId: string;
played: boolean;
status?: MessageStatusType;
textPending?: boolean;
@ -41,34 +46,25 @@ export type OwnProps = Readonly<{
kickOffAttachmentDownload(): void;
onCorrupted(): void;
computePeaks(url: string, barCount: number): Promise<ComputePeaksResult>;
onPlayMessage: (id: string, position: number) => void;
}>;
export type DispatchProps = Readonly<{
loadAndPlayMessageAudio: (
id: string,
url: string,
context: string,
position: number,
isConsecutive: boolean
) => void;
pushPanelForConversation: PushPanelForConversationActionType;
setCurrentTime: (currentTime: number) => void;
setPlaybackRate: (conversationId: string, rate: number) => void;
setPlaybackRate: (rate: number) => void;
setIsPlaying: (value: boolean) => void;
}>;
export type Props = OwnProps & DispatchProps;
type ButtonProps = {
variant: 'play' | 'playback-rate';
mod?: string;
label: string;
visible?: boolean;
animateClick?: boolean;
onClick: () => void;
onMouseDown?: () => void;
onMouseUp?: () => void;
children?: ReactNode;
};
enum State {
@ -92,8 +88,6 @@ const REWIND_BAR_COUNT = 2;
const SMALL_INCREMENT = 1;
const BIG_INCREMENT = 5;
const PLAYBACK_RATES = [1, 1.5, 2, 0.5];
const SPRING_CONFIG = {
mass: 0.5,
tension: 350,
@ -103,48 +97,16 @@ const SPRING_CONFIG = {
const DOT_DIV_WIDTH = 14;
// Utils
const timeToText = (time: number): string => {
const hours = Math.floor(time / 3600);
let minutes = Math.floor((time % 3600) / 60).toString();
let seconds = Math.floor(time % 60).toString();
if (hours !== 0 && minutes.length < 2) {
minutes = `0${minutes}`;
}
if (seconds.length < 2) {
seconds = `0${seconds}`;
}
return hours ? `${hours}:${minutes}:${seconds}` : `${minutes}:${seconds}`;
};
/**
* Handles animations, key events, and stopping event propagation
* for play button and playback rate button
*/
const Button = React.forwardRef<HTMLButtonElement, ButtonProps>(
/** Handles animations, key events, and stopping event propagation */
const PlaybackButton = React.forwardRef<HTMLButtonElement, ButtonProps>(
function ButtonInner(props, ref) {
const {
variant,
mod,
label,
children,
onClick,
visible = true,
animateClick = true,
} = props;
const [isDown, setIsDown] = useState(false);
const { mod, label, onClick, visible = true } = props;
const [animProps] = useSpring(
{
config: SPRING_CONFIG,
to:
isDown && animateClick ? { scale: 1.3 } : { scale: visible ? 1 : 0 },
to: { scale: visible ? 1 : 0 },
},
[visible, isDown, animateClick]
[visible]
);
// Clicking button toggles playback
@ -178,19 +140,14 @@ const Button = React.forwardRef<HTMLButtonElement, ButtonProps>(
type="button"
ref={ref}
className={classNames(
`${CSS_BASE}__${variant}-button`,
mod ? `${CSS_BASE}__${variant}-button--${mod}` : undefined
`${CSS_BASE}__play-button`,
mod ? `${CSS_BASE}__play-button--${mod}` : undefined
)}
onClick={onButtonClick}
onKeyDown={onButtonKeyDown}
onMouseDown={() => setIsDown(true)}
onMouseUp={() => setIsDown(false)}
onMouseLeave={() => setIsDown(false)}
tabIndex={0}
aria-label={label}
>
{children}
</button>
/>
</animated.div>
);
}
@ -237,10 +194,9 @@ function PlayedDot({
* toggle Play/Pause button.
*
* A global audio player is used for playback and access is managed by the
* `activeAudioID` and `activeAudioContext` properties. Whenever both
* `activeAudioID` and `activeAudioContext` are equal to `id` and `context`
* respectively the instance of the `MessageAudio` assumes the ownership of the
* `Audio` instance and fully manages it.
* `active.content.current.id` and the `active.content.context` properties. Whenever both
* are equal to `id` and `context` respectively the instance of the `MessageAudio`
* assumes the ownership of the `Audio` instance and fully manages it.
*
* `context` is required for displaying separate MessageAudio instances in
* MessageDetails and Message React components.
@ -250,10 +206,8 @@ export function MessageAudio(props: Props): JSX.Element {
active,
buttonRef,
i18n,
renderingContext,
attachment,
collapseMetadata,
conversationId,
withContentAbove,
withContentBelow,
@ -270,7 +224,7 @@ export function MessageAudio(props: Props): JSX.Element {
onCorrupted,
computePeaks,
setPlaybackRate,
loadAndPlayMessageAudio,
onPlayMessage,
pushPanelForConversation,
setCurrentTime,
setIsPlaying,
@ -373,10 +327,9 @@ export function MessageAudio(props: Props): JSX.Element {
if (active) {
setIsPlaying(true);
} else {
loadAndPlayMessageAudio(id, attachment.url, renderingContext, 0, false);
onPlayMessage(id, 0);
}
} else {
// stop
setIsPlaying(false);
}
};
@ -401,13 +354,7 @@ export function MessageAudio(props: Props): JSX.Element {
}
if (attachment.url) {
loadAndPlayMessageAudio(
id,
attachment.url,
renderingContext,
progress,
false
);
onPlayMessage(id, progress);
} else {
log.warn('Waveform clicked on attachment with no url');
}
@ -467,7 +414,7 @@ export function MessageAudio(props: Props): JSX.Element {
aria-valuenow={currentTimeOrZero}
aria-valuemin={0}
aria-valuemax={duration}
aria-valuetext={timeToText(currentTimeOrZero)}
aria-valuetext={durationToPlaybackText(currentTimeOrZero)}
>
{peaks.map((peak, i) => {
let height = Math.max(BAR_MIN_HEIGHT, BAR_MAX_HEIGHT * peak);
@ -512,26 +459,22 @@ export function MessageAudio(props: Props): JSX.Element {
);
} else if (state === State.NotDownloaded) {
button = (
<Button
<PlaybackButton
ref={buttonRef}
variant="play"
mod="download"
label="MessageAudio--download"
animateClick={false}
onClick={kickOffAttachmentDownload}
/>
);
} else {
// State.Normal
button = (
<Button
<PlaybackButton
ref={buttonRef}
variant="play"
mod={isPlaying ? 'pause' : 'play'}
label={
isPlaying ? i18n('MessageAudio--pause') : i18n('MessageAudio--play')
}
animateClick={false}
onClick={toggleIsPlaying}
/>
);
@ -539,20 +482,6 @@ export function MessageAudio(props: Props): JSX.Element {
const countDown = Math.max(0, duration - (active?.currentTime ?? 0));
const nextPlaybackRate = (currentRate: number): number => {
// cycle through the rates
return PLAYBACK_RATES[
(PLAYBACK_RATES.indexOf(currentRate) + 1) % PLAYBACK_RATES.length
];
};
const playbackRateLabels: { [key: number]: string } = {
1: i18n('MessageAudio--playbackRate1'),
1.5: i18n('MessageAudio--playbackRate1p5'),
2: i18n('MessageAudio--playbackRate2'),
0.5: i18n('MessageAudio--playbackRatep5'),
};
const metadata = (
<div className={`${CSS_BASE}__metadata`}>
<div
@ -562,7 +491,7 @@ export function MessageAudio(props: Props): JSX.Element {
`${CSS_BASE}__countdown--${played ? 'played' : 'unplayed'}`
)}
>
{timeToText(countDown)}
{durationToPlaybackText(countDown)}
</div>
<div className={`${CSS_BASE}__controls`}>
@ -570,21 +499,20 @@ export function MessageAudio(props: Props): JSX.Element {
played={played}
onHide={() => setIsPlayedDotVisible(false)}
/>
<Button
variant="playback-rate"
label={playbackRateLabels[active?.playbackRate ?? 1]}
<PlaybackRateButton
i18n={i18n}
variant={`message-${direction}`}
playbackRate={active?.playbackRate}
visible={isPlaying && (!played || !isPlayedDotVisible)}
onClick={() => {
if (active) {
setPlaybackRate(
conversationId,
nextPlaybackRate(active.playbackRate)
PlaybackRateButton.nextPlaybackRate(active.playbackRate)
);
}
}}
>
{playbackRateLabels[active?.playbackRate ?? 1]}
</Button>
/>
</div>
{!withContentBelow && !collapseMetadata && (

View file

@ -27,7 +27,7 @@ import {
} from '../../types/MIME';
import { ReadStatus } from '../../messages/MessageReadStatus';
import { MessageAudio } from './MessageAudio';
import { computePeaks } from '../GlobalAudioContext';
import { computePeaks } from '../VoiceNotesPlaybackContext';
import { setupI18n } from '../../util/setupI18n';
import enMessages from '../../../_locales/en/messages.json';
import { pngUrl } from '../../storybook/Fixtures';
@ -89,6 +89,10 @@ const Template: Story<Partial<Props>> = args => {
});
};
const messageIdToAudioUrl = {
'incompetech-com-Agnus-Dei-X': '/fixtures/incompetech-com-Agnus-Dei-X.mp3',
};
function getJoyReaction() {
return {
emoji: '😂',
@ -152,14 +156,9 @@ function MessageAudioContainer({
// eslint-disable-next-line react-hooks/exhaustive-deps
}, []);
const loadAndPlayMessageAudio = (
_id: string,
url: string,
_context: string,
position: number
) => {
const handlePlayMessage = (id: string, position: number) => {
if (!active) {
audio.src = url;
audio.src = messageIdToAudioUrl[id as keyof typeof messageIdToAudioUrl];
setIsActive(true);
}
if (!playing) {
@ -176,7 +175,7 @@ function MessageAudioContainer({
}
};
const setPlaybackRateAction = (_conversationId: string, rate: number) => {
const setPlaybackRateAction = (rate: number) => {
audio.playbackRate = rate;
setPlaybackRate(rate);
};
@ -202,14 +201,12 @@ function MessageAudioContainer({
return (
<MessageAudio
{...props}
conversationId="some-conversation-id"
active={active}
computePeaks={computePeaks}
id="storybook"
loadAndPlayMessageAudio={loadAndPlayMessageAudio}
onPlayMessage={handlePlayMessage}
played={_played}
pushPanelForConversation={action('pushPanelForConversation')}
renderingContext="storybook"
setCurrentTime={setCurrentTimeAction}
setIsPlaying={setIsPlayingAction}
setPlaybackRate={setPlaybackRateAction}
@ -427,11 +424,12 @@ export function EmojiMessages(): JSX.Element {
<br />
<TimelineMessage
{...createProps({
id: 'incompetech-com-Agnus-Dei-X',
attachments: [
fakeAttachment({
contentType: AUDIO_MP3,
fileName: 'incompetech-com-Agnus-Dei-X.mp3',
url: '/fixtures/incompetech-com-Agnus-Dei-X.mp3',
url: messageIdToAudioUrl['incompetech-com-Agnus-Dei-X'],
}),
],
text: '😀',
@ -1353,11 +1351,12 @@ export const _Audio = (): JSX.Element => {
const [isPlayed, setIsPlayed] = React.useState(false);
const messageProps = createProps({
id: 'incompetech-com-Agnus-Dei-X',
attachments: [
fakeAttachment({
contentType: AUDIO_MP3,
fileName: 'incompetech-com-Agnus-Dei-X.mp3',
url: '/fixtures/incompetech-com-Agnus-Dei-X.mp3',
url: messageIdToAudioUrl['incompetech-com-Agnus-Dei-X'],
path: 'somepath',
}),
],

View file

@ -30,12 +30,14 @@ class GlobalMessageAudio {
load({
src,
playbackRate,
onLoadedMetadata,
onTimeUpdate,
onDurationChange,
onEnded,
}: {
src: string;
playbackRate: number;
onLoadedMetadata: () => void;
onTimeUpdate: () => void;
onDurationChange: () => void;
@ -50,7 +52,9 @@ class GlobalMessageAudio {
this.#onDurationChange = onDurationChange;
this.#onEnded = onEnded;
// changing src resets the playback rate
this.#audio.src = src;
this.#audio.playbackRate = playbackRate;
}
play(): Promise<void> {

View file

@ -6,62 +6,75 @@ import type { ReadonlyDeep } from 'type-fest';
import type { BoundActionCreatorsMapObject } from '../../hooks/useBoundActions';
import { useBoundActions } from '../../hooks/useBoundActions';
import { Sound } from '../../util/Sound';
import * as Errors from '../../types/errors';
import type { StateType as RootStateType } from '../reducer';
import { selectNextConsecutiveVoiceNoteMessageId } from '../selectors/audioPlayer';
import {
getConversationByIdSelector,
getSelectedConversationId,
} from '../selectors/conversations';
import { setVoiceNotePlaybackRate, markViewed } from './conversations';
import { extractVoiceNoteForPlayback } from '../selectors/audioPlayer';
import type {
VoiceNoteAndConsecutiveForPlayback,
VoiceNoteForPlayback,
} from '../selectors/audioPlayer';
import type {
MessagesAddedActionType,
MessageDeletedActionType,
MessageChangedActionType,
SelectedConversationChangedActionType,
ConversationChangedActionType,
} from './conversations';
import {
SELECTED_CONVERSATION_CHANGED,
setVoiceNotePlaybackRate,
markViewed,
} from './conversations';
import * as log from '../../logging/log';
import * as Errors from '../../types/errors';
import { strictAssert } from '../../util/assert';
import { globalMessageAudio } from '../../services/globalMessageAudio';
import { isPlayed } from '../../types/Attachment';
import { getMessageIdForLogging } from '../../util/idForLogging';
import { getMessagePropStatus } from '../selectors/message';
import { getUserConversationId } from '../selectors/user';
import { isAudio } from '../../types/Attachment';
import { getAttachmentUrlForPath } from '../selectors/message';
import { SeenStatus } from '../../MessageSeenStatus';
// State
export type AudioPlayerContent = ReadonlyDeep<{
conversationId: string;
context: string;
current: VoiceNoteForPlayback;
queue: ReadonlyArray<VoiceNoteForPlayback>;
nextMessageTimestamp: number | undefined;
// playing because it followed a message
// false on the first of a consecutive group
isConsecutive: boolean;
ourConversationId: string | undefined;
startPosition: number;
}>;
export type ActiveAudioPlayerStateType = ReadonlyDeep<{
playing: boolean;
currentTime: number;
playbackRate: number;
duration: number;
content: AudioPlayerContent | undefined;
}>;
export type AudioPlayerStateType = ReadonlyDeep<{
active:
| (ActiveAudioPlayerStateType & { id: string; context: string })
| undefined;
active: ActiveAudioPlayerStateType | undefined;
}>;
// Actions
/**
* Sets the current "active" message audio for a particular rendering "context"
*/
export type SetMessageAudioAction = ReadonlyDeep<{
type: 'audioPlayer/SET_MESSAGE_AUDIO';
payload:
| {
id: string;
conversationId: string;
context: string;
current: VoiceNoteForPlayback;
queue: ReadonlyArray<VoiceNoteForPlayback>;
isConsecutive: boolean;
// timestamp of the message following the last one in the queue
nextMessageTimestamp: number | undefined;
ourConversationId: string | undefined;
startPosition: number;
playbackRate: number;
duration: number;
}
| undefined;
}>;
@ -71,7 +84,7 @@ type SetPlaybackRate = ReadonlyDeep<{
payload: number;
}>;
type SetIsPlayingAction = ReadonlyDeep<{
export type SetIsPlayingAction = ReadonlyDeep<{
type: 'audioPlayer/SET_IS_PLAYING';
payload: boolean;
}>;
@ -90,6 +103,11 @@ type DurationChanged = ReadonlyDeep<{
payload: number;
}>;
type UpdateQueueAction = ReadonlyDeep<{
type: 'audioPlayer/UPDATE_QUEUE';
payload: ReadonlyArray<VoiceNoteForPlayback>;
}>;
type AudioPlayerActionType = ReadonlyDeep<
| SetMessageAudioAction
| SetIsPlayingAction
@ -97,20 +115,24 @@ type AudioPlayerActionType = ReadonlyDeep<
| MessageAudioEnded
| CurrentTimeUpdated
| DurationChanged
| UpdateQueueAction
>;
// Action Creators
export const actions = {
loadAndPlayMessageAudio,
unloadMessageAudio,
loadMessageAudio,
playMessageAudio,
setPlaybackRate,
setCurrentTime,
setIsPlaying,
pauseVoiceNotePlayer,
unloadMessageAudio,
};
export const useActions = (): BoundActionCreatorsMapObject<typeof actions> =>
useBoundActions(actions);
export const useAudioPlayerActions = (): BoundActionCreatorsMapObject<
typeof actions
> => useBoundActions(actions);
function setCurrentTime(value: number): CurrentTimeUpdated {
globalMessageAudio.currentTime = value;
@ -120,20 +142,7 @@ function setCurrentTime(value: number): CurrentTimeUpdated {
};
}
function setIsPlaying(value: boolean): SetIsPlayingAction {
if (!value) {
globalMessageAudio.pause();
} else {
void globalMessageAudio.play();
}
return {
type: 'audioPlayer/SET_IS_PLAYING',
payload: value,
};
}
function setPlaybackRate(
conversationId: string,
rate: number
): ThunkAction<
void,
@ -141,14 +150,23 @@ function setPlaybackRate(
unknown,
SetPlaybackRate | ConversationChangedActionType
> {
return dispatch => {
return (dispatch, getState) => {
const { audioPlayer } = getState();
const { active } = audioPlayer;
if (!active?.content) {
log.warn('audioPlayer.setPlaybackRate: No active message audio');
return;
}
globalMessageAudio.playbackRate = rate;
dispatch({
type: 'audioPlayer/SET_PLAYBACK_RATE',
payload: rate,
});
// update the preference for the conversation
const { conversationId } = active.content;
dispatch(
setVoiceNotePlaybackRate({
conversationId,
@ -158,14 +176,6 @@ function setPlaybackRate(
};
}
function unloadMessageAudio(): SetMessageAudioAction {
globalMessageAudio.pause();
return {
type: 'audioPlayer/SET_MESSAGE_AUDIO',
payload: undefined,
};
}
const stateChangeConfirmUpSound = new Sound({
src: 'sounds/state-change_confirm-up.ogg',
});
@ -173,30 +183,52 @@ const stateChangeConfirmDownSound = new Sound({
src: 'sounds/state-change_confirm-down.ogg',
});
/**
* @param isConsecutive Is this part of a consecutive group (not first though)
*/
function loadAndPlayMessageAudio(
id: string,
url: string,
context: string,
position: number,
isConsecutive: boolean
/** plays a message that has been loaded into content */
function playMessageAudio(
playConsecutiveSound: boolean
): ThunkAction<
void,
RootStateType,
unknown,
| SetMessageAudioAction
| MessageAudioEnded
| CurrentTimeUpdated
| SetIsPlayingAction
| DurationChanged
CurrentTimeUpdated | SetIsPlayingAction | DurationChanged | MessageAudioEnded
> {
return (dispatch, getState) => {
const ourConversationId = getUserConversationId(getState());
if (!ourConversationId) {
log.error('playMessageAudio: No ourConversationId');
return;
}
const { audioPlayer } = getState();
const { active } = audioPlayer;
if (!active) {
log.error('playMessageAudio: Not active');
return;
}
const { content } = active;
if (!content) {
log.error('playMessageAudio: No message audio loaded');
return;
}
const { current } = content;
if (!current.url) {
log.error('playMessageAudio: pending download');
return;
}
if (playConsecutiveSound) {
void stateChangeConfirmUpSound.play();
}
// set source to new message and start playing
globalMessageAudio.load({
src: url,
src: current.url,
playbackRate: active.playbackRate,
onTimeUpdate: () => {
dispatch({
type: 'audioPlayer/CURRENT_TIME_UPDATED',
@ -210,18 +242,16 @@ function loadAndPlayMessageAudio(
'Audio should have definite duration on `loadedmetadata` event'
);
log.info('MessageAudio: `loadedmetadata` event', id);
log.info('playMessageAudio: `loadedmetadata` event', current.id);
// Sync up audio's time in case the <audio/> loaded its source after
// the user clicked on the waveform
if (getState().audioPlayer.active) {
globalMessageAudio.currentTime =
position * globalMessageAudio.duration;
}
dispatch(
setCurrentTime(content.startPosition * globalMessageAudio.duration)
);
dispatch(setIsPlaying(true));
},
onDurationChange: () => {
log.info('MessageAudio: `durationchange` event', id);
log.info('playMessageAudio: `durationchange` event', current.id);
if (!Number.isNaN(globalMessageAudio.duration)) {
dispatch({
@ -232,88 +262,110 @@ function loadAndPlayMessageAudio(
},
onEnded: () => {
const nextVoiceNoteMessage = selectNextConsecutiveVoiceNoteMessageId(
getState()
);
dispatch({
type: 'audioPlayer/MESSAGE_AUDIO_ENDED',
});
// play the next message
// for now we can just read the current conversation
// this won't work when we allow a message to continue to play as the user
// navigates away from the conversation
// TODO: DESKTOP-4158
if (nextVoiceNoteMessage) {
void stateChangeConfirmUpSound.play();
dispatch(
loadAndPlayMessageAudio(
nextVoiceNoteMessage.id,
nextVoiceNoteMessage.url,
context,
0,
true
)
);
} else if (isConsecutive) {
const { audioPlayer: innerAudioPlayer } = getState();
const { active: innerActive } = innerAudioPlayer;
if (
innerActive?.content?.isConsecutive &&
innerActive.content?.queue.length === 0
) {
void stateChangeConfirmDownSound.play();
}
dispatch({ type: 'audioPlayer/MESSAGE_AUDIO_ENDED' });
},
});
// mark the message as played
const message = getState().conversations.messagesLookup[id];
if (message) {
const messageIdForLogging = getMessageIdForLogging(message);
const status = getMessagePropStatus(message, message.conversationId);
if (message.type === 'incoming' || message.type === 'outgoing') {
if (!isPlayed(message.type, status, message.readStatus)) {
markViewed(id);
} else {
log.info(
'audioPlayer.loadAndPlayMessageAudio: message already played',
{ message: messageIdForLogging }
);
if (!current.isPlayed) {
const message = getState().conversations.messagesLookup[current.id];
if (message && message.seenStatus !== SeenStatus.Unseen) {
markViewed(current.id);
}
} else {
log.warn(
`audioPlayer.loadAndPlayMessageAudio: message wrong type: ${message.type}`,
{ message: messageIdForLogging }
);
}
} else {
log.warn('audioPlayer.loadAndPlayMessageAudio: message not found', {
message: id,
log.info('audioPlayer.loadMessageAudio: message already played', {
message: current.messageIdForLogging,
});
}
};
}
// set the playback rate to the stored value for the selected conversation
const conversationId = getSelectedConversationId(getState());
if (conversationId) {
const conversation = getConversationByIdSelector(getState())(
conversationId
);
globalMessageAudio.playbackRate =
conversation?.voiceNotePlaybackRate ?? 1;
}
globalMessageAudio.play().catch(error => {
log.error('MessageAudio: resume error', id, Errors.toLogFormat(error));
dispatch(unloadMessageAudio());
});
dispatch({
/**
* Load message audio into the "content", the smart MiniPlayer will then play it
*/
function loadMessageAudio({
voiceNoteData,
position,
context,
ourConversationId,
}: {
voiceNoteData: VoiceNoteAndConsecutiveForPlayback;
position: number;
context: string;
ourConversationId: string;
}): SetMessageAudioAction {
const {
conversationId,
voiceNote,
consecutiveVoiceNotes,
playbackRate,
nextMessageTimestamp,
} = voiceNoteData;
return {
type: 'audioPlayer/SET_MESSAGE_AUDIO',
payload: {
id,
conversationId,
context,
playbackRate: globalMessageAudio.playbackRate,
duration: globalMessageAudio.duration,
current: voiceNote,
queue: consecutiveVoiceNotes,
isConsecutive: false,
nextMessageTimestamp,
ourConversationId,
startPosition: position,
playbackRate,
},
});
};
}
dispatch(setIsPlaying(true));
export function setIsPlaying(
value: boolean
): ThunkAction<
void,
RootStateType,
unknown,
SetMessageAudioAction | SetIsPlayingAction
> {
return (dispatch, getState) => {
if (!value) {
globalMessageAudio.pause();
} else {
const { audioPlayer } = getState();
globalMessageAudio.play().catch(error => {
log.error(
'MessageAudio: resume error',
audioPlayer.active?.content?.current.id,
Errors.toLogFormat(error)
);
dispatch(unloadMessageAudio());
});
}
dispatch({
type: 'audioPlayer/SET_IS_PLAYING',
payload: value,
});
};
}
/**
* alias for callers that just want to pause any voice notes before starting
* their own playback: story viewer, media viewer, calling
*/
export function pauseVoiceNotePlayer(): ReturnType<typeof setIsPlaying> {
return setIsPlaying(false);
}
export function unloadMessageAudio(): SetMessageAudioAction {
globalMessageAudio.pause();
return {
type: 'audioPlayer/SET_MESSAGE_AUDIO',
payload: undefined,
};
}
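
pauseVoiceNotePlayer above is the entry point for callers that start their own media: calling, the media viewer, and the story viewer. A minimal sketch of that pattern, assuming a wrapper component under ts/state/smart/ (the wrapper name and import paths are illustrative, not part of this commit):

import React from 'react';
import { useAudioPlayerActions } from '../ducks/audioPlayer';
import { Lightbox } from '../../components/Lightbox';
import type { PropsType as LightboxPropsType } from '../../components/Lightbox';

export function SmartLightboxSketch(
  props: Omit<LightboxPropsType, 'onMediaPlaybackStart'>
): JSX.Element {
  const { pauseVoiceNotePlayer } = useAudioPlayerActions();
  // Lightbox calls onMediaPlaybackStart() right before it calls play(),
  // so any active voice note is paused before the video starts.
  return <Lightbox {...props} onMediaPlaybackStart={pauseVoiceNotePlayer} />;
}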
@ -329,117 +381,329 @@ export function reducer(
| AudioPlayerActionType
| MessageDeletedActionType
| MessageChangedActionType
| MessagesAddedActionType
| SelectedConversationChangedActionType
>
): AudioPlayerStateType {
const { active } = state;
if (action.type === 'audioPlayer/SET_MESSAGE_AUDIO') {
const { payload } = action;
return {
...state,
active: payload
? {
...payload,
playing: true,
active: {
// defaults
playing: false,
currentTime: 0,
}
: undefined,
};
}
// Reset activeAudioID on conversation change.
if (action.type === SELECTED_CONVERSATION_CHANGED) {
return {
...state,
active: undefined,
duration: 0,
...active,
playbackRate: payload?.playbackRate ?? 1,
content: payload,
},
};
}
if (action.type === 'audioPlayer/CURRENT_TIME_UPDATED') {
if (!active) {
return state;
}
return {
...state,
active: state.active
? {
...state.active,
active: {
...active,
currentTime: action.payload,
}
: undefined,
},
};
}
if (action.type === 'audioPlayer/DURATION_CHANGED') {
if (!active) {
return state;
}
return {
...state,
active: state.active
? {
...state.active,
active: {
...active,
duration: action.payload,
}
: undefined,
};
}
if (action.type === 'audioPlayer/MESSAGE_AUDIO_ENDED') {
return {
...state,
active: undefined,
},
};
}
if (action.type === 'audioPlayer/SET_IS_PLAYING') {
if (!active) {
return state;
}
return {
...state,
active: state.active
? {
...state.active,
active: {
...active,
playing: action.payload,
}
: undefined,
},
};
}
if (action.type === 'audioPlayer/SET_PLAYBACK_RATE') {
if (!active) {
return state;
}
return {
...state,
active: state.active
? {
...state.active,
active: {
...active,
playbackRate: action.payload,
}
: undefined,
},
};
}
// Reset activeAudioID on when played message is deleted on expiration.
if (action.type === 'MESSAGE_DELETED') {
const { id } = action.payload;
if (state.active?.id !== id) {
if (action.type === 'MESSAGES_ADDED') {
if (!active) {
return state;
}
const { content } = active;
if (!content) {
return state;
}
if (content.conversationId !== action.payload.conversationId) {
return state;
}
const updatedQueue: Array<VoiceNoteForPlayback> = [...content.queue];
for (const message of action.payload.messages) {
if (message.deletedForEveryone) {
continue;
}
if (message.timestamp < content.current.timestamp) {
continue;
}
// in range of the queue
if (
content.nextMessageTimestamp === undefined ||
message.timestamp < content.nextMessageTimestamp
) {
if (message.type !== 'incoming' && message.type !== 'outgoing') {
continue;
}
const voiceNote = extractVoiceNoteForPlayback(
message,
content.ourConversationId
);
// index of the message in the queue after this one
const idx = updatedQueue.findIndex(
m => m.timestamp > message.timestamp
);
// a non-voice-note breaks the consecutive queue: drop queued values newer than this message
if (!voiceNote && idx !== -1) {
updatedQueue.splice(idx);
continue;
}
// insert a new voice note
if (voiceNote) {
if (idx === -1) {
updatedQueue.push(voiceNote);
} else {
updatedQueue.splice(idx, 0, voiceNote);
}
}
}
}
if (updatedQueue.length === content.queue.length) {
return state;
}
return {
...state,
active: undefined,
active: {
...active,
content: {
...content,
queue: updatedQueue,
},
},
};
}
// Reset activeAudioID on when played message is deleted for everyone.
if (action.type === 'audioPlayer/MESSAGE_AUDIO_ENDED') {
if (!active) {
return state;
}
const { content } = active;
if (!content) {
return state;
}
const { queue } = content;
const [nextVoiceNote, ...newQueue] = queue;
if (nextVoiceNote) {
return {
...state,
active: {
...active,
content: {
...content,
current: nextVoiceNote,
queue: newQueue,
isConsecutive: true,
startPosition: 0,
},
},
};
}
return {
...state,
active: {
...active,
content: undefined,
},
};
}
// Reset active when played message is deleted on expiration or DOE.
if (
action.type === 'MESSAGE_DELETED' ||
(action.type === 'MESSAGE_CHANGED' &&
action.payload.data.deletedForEveryone)
) {
const { id } = action.payload;
if (!active) {
return state;
}
const { content } = active;
if (!content) {
return state;
}
// if we deleted the message currently being played
// move on to the next message
if (content.current.id === id) {
const [next, ...rest] = content.queue;
if (!next) {
return {
...state,
active: {
...active,
content: undefined,
},
};
}
return {
...state,
active: {
...active,
content: {
...content,
current: next,
queue: rest,
},
},
};
}
// if we deleted a message on the queue
// just update the queue
const message = content.queue.find(el => el.id === id);
if (message) {
return {
...state,
active: {
...active,
content: {
...content,
queue: content.queue.filter(el => el.id !== id),
},
},
};
}
return state;
}
// if it's a voice note
// and this event is letting us know that it has downloaded
// update the url if it's in the queue
if (action.type === 'MESSAGE_CHANGED') {
if (!active) {
return state;
}
const { content } = active;
if (!content) {
return state;
}
const { id, data } = action.payload;
if (state.active?.id !== id) {
const { attachments } = data;
const attachment = attachments?.[0];
if (
!attachments ||
!attachment ||
!isAudio(attachments) ||
!attachment.path
) {
return state;
}
if (!data.deletedForEveryone) {
return state;
const url = getAttachmentUrlForPath(attachment.path);
// if we got the url for the current message
if (
content.current.id === id &&
content.current.url === undefined &&
data.id
) {
return {
...state,
active: {
...active,
content: {
...content,
current: {
...content.current,
url,
},
},
},
};
}
// if it's in the queue
const idx = content.queue.findIndex(v => v.id === id);
if (idx !== -1) {
const updatedQueue = [...content.queue];
updatedQueue[idx] = {
...updatedQueue[idx],
url,
};
return {
...state,
active: undefined,
active: {
...active,
content: {
...content,
queue: updatedQueue,
},
},
};
}
return state;
}
return state;
}
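
Per the loadMessageAudio doc comment above, playback is now two-phase: an action loads the current voice note plus its consecutive queue into active.content, and the smart MiniPlayer drives actual playback (playMessageAudio). A minimal sketch of how a container could implement MessageAudio's new onPlayMessage prop from these pieces, assuming a hook under ts/state/smart/; the hook name, the 'timeline' context value, and the import paths are illustrative, not taken from this commit:

import { useSelector } from 'react-redux';
import type { StateType } from '../reducer';
import { useAudioPlayerActions } from '../ducks/audioPlayer';
import { selectVoiceNoteAndConsecutive } from '../selectors/audioPlayer';
import { getUserConversationId } from '../selectors/user';

export function usePlayVoiceNote(): (id: string, position: number) => void {
  const { loadMessageAudio } = useAudioPlayerActions();
  const voiceNoteSelector = useSelector(selectVoiceNoteAndConsecutive);
  const ourConversationId = useSelector((state: StateType) =>
    getUserConversationId(state)
  );
  return (id, position) => {
    const voiceNoteData = voiceNoteSelector(id);
    if (!voiceNoteData || !ourConversationId) {
      return;
    }
    // Fills active.content (current note, queue, startPosition, playbackRate);
    // the smart MiniPlayer observes this state and starts playback.
    loadMessageAudio({
      voiceNoteData,
      position,
      context: 'timeline', // hypothetical rendering context
      ourConversationId,
    });
  };
}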

View file

@ -2445,6 +2445,7 @@ function messageChanged(
},
};
}
function messageDeleted(
id: string,
conversationId: string
@ -2457,6 +2458,7 @@ function messageDeleted(
},
};
}
function messageExpanded(
id: string,
displayLimit: number
@ -2477,6 +2479,7 @@ function messageExpired(id: string): MessageExpiredActionType {
},
};
}
function messagesAdded({
conversationId,
isActive,

View file

@ -8,12 +8,12 @@ import { Provider } from 'react-redux';
import type { Store } from 'redux';
import { SmartApp } from '../smart/App';
import { SmartGlobalAudioProvider } from '../smart/GlobalAudioProvider';
import { SmartVoiceNotesPlaybackProvider } from '../smart/VoiceNotesPlaybackProvider';
export const createApp = (store: Store): ReactElement => (
<Provider store={store}>
<SmartGlobalAudioProvider>
<SmartVoiceNotesPlaybackProvider>
<SmartApp />
</SmartGlobalAudioProvider>
</SmartVoiceNotesPlaybackProvider>
</Provider>
);

View file

@ -2,67 +2,211 @@
// SPDX-License-Identifier: AGPL-3.0-only
import { createSelector } from 'reselect';
import { collectFirst } from '../../util/iterables';
import {
getIntl,
getUserACI,
getUserConversationId,
getUserNumber,
} from './user';
import {
getAttachmentUrlForPath,
getMessagePropStatus,
getSource,
getSourceUuid,
} from './message';
import {
getConversationByIdSelector,
getConversations,
getConversationSelector,
getSelectedConversationId,
} from './conversations';
import type { StateType } from '../reducer';
import { getConversations } from './conversations';
import { getPropsForAttachment } from './message';
import * as log from '../../logging/log';
import type { MessageWithUIFieldsType } from '../ducks/conversations';
import type { MessageAttributesType } from '../../model-types.d';
import { getMessageIdForLogging } from '../../util/idForLogging';
import * as Attachment from '../../types/Attachment';
import type { ActiveAudioPlayerStateType } from '../ducks/audioPlayer';
import { isPlayed } from '../../types/Attachment';
import type { UUIDStringType } from '../../types/UUID';
export type VoiceNoteForPlayback = {
id: string;
// undefined if download is pending
url: string | undefined;
type: 'incoming' | 'outgoing';
source: string | undefined;
sourceUuid: UUIDStringType | undefined;
isPlayed: boolean;
messageIdForLogging: string;
timestamp: number;
};
export const isPaused = (state: StateType): boolean => {
return state.audioPlayer.active === undefined;
};
export const selectActiveVoiceNoteMessageId = (
export const selectAudioPlayerActive = (
state: StateType
): string | undefined => state.audioPlayer.active?.id;
): ActiveAudioPlayerStateType | undefined => {
return state.audioPlayer.active;
};
export const selectNextConsecutiveVoiceNoteMessageId = createSelector(
getConversations,
selectActiveVoiceNoteMessageId,
(
conversations,
activeVoiceNoteMessageId
): { id: string; url: string } | undefined => {
if (!activeVoiceNoteMessageId) {
return undefined;
}
export const selectVoiceNoteTitle = createSelector(
getUserNumber,
getUserACI,
getUserConversationId,
getConversationSelector,
getIntl,
(ourNumber, ourACI, ourConversationId, conversationSelector, i18n) => {
return (
message: Pick<MessageAttributesType, 'type' | 'source' | 'sourceUuid'>
) => {
const source = getSource(message, ourNumber);
const sourceUuid = getSourceUuid(message, ourACI);
const currentMessage =
conversations.messagesLookup[activeVoiceNoteMessageId];
const conversationMessages =
conversations.messagesByConversation[currentMessage.conversationId];
const conversation =
!source && !sourceUuid
? conversationSelector(ourConversationId)
: conversationSelector(sourceUuid || source);
if (!conversationMessages) {
return undefined;
}
const idx = conversationMessages.messageIds.indexOf(
activeVoiceNoteMessageId
);
const nextIdx = idx + 1;
if (!(nextIdx in conversationMessages.messageIds)) {
return undefined;
}
const nextMessageId = conversationMessages.messageIds[nextIdx];
const nextMessage = conversations.messagesLookup[nextMessageId];
if (!nextMessage.attachments) {
return undefined;
}
const voiceNoteUrl = collectFirst(
nextMessage.attachments.map(getPropsForAttachment),
a => (a && a.isVoiceMessage && a.url ? a.url : undefined)
);
if (!voiceNoteUrl) {
return undefined;
}
return {
id: nextMessageId,
url: voiceNoteUrl,
return conversation.isMe ? i18n('you') : conversation.title;
};
}
);
export function extractVoiceNoteForPlayback(
message: MessageAttributesType,
ourConversationId: string | undefined
): VoiceNoteForPlayback | undefined {
const { type } = message;
if (type !== 'incoming' && type !== 'outgoing') {
return;
}
if (!message.attachments) {
return;
}
const attachment = message.attachments[0];
if (!attachment || !Attachment.isAudio(message.attachments)) {
return;
}
const voiceNoteUrl = attachment.path
? getAttachmentUrlForPath(attachment.path)
: undefined;
const status = getMessagePropStatus(message, ourConversationId);
return {
id: message.id,
url: voiceNoteUrl,
type,
isPlayed: isPlayed(type, status, message.readStatus),
messageIdForLogging: getMessageIdForLogging(message),
timestamp: message.timestamp,
source: message.source,
sourceUuid: message.sourceUuid,
};
}
/** Data necessary to play back a voice note and any consecutive notes */
export type VoiceNoteAndConsecutiveForPlayback = {
conversationId: string;
voiceNote: VoiceNoteForPlayback;
previousMessageId: string | undefined;
consecutiveVoiceNotes: ReadonlyArray<VoiceNoteForPlayback>;
playbackRate: number;
  // timestamp of the message after all the ones in the queue
nextMessageTimestamp: number | undefined;
};
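// Illustrative only, not part of this commit: a populated value of the type above,
// with invented ids, showing how the fields relate. `consecutiveVoiceNotes` holds the
// voice notes that directly follow `voiceNote`; a note whose attachment has not
// finished downloading has `url: undefined`.
const exampleQueue: VoiceNoteAndConsecutiveForPlayback = {
  conversationId: 'conversation-1',
  voiceNote: {
    id: 'message-42',
    url: 'attachment://voice-note-42.aac',
    type: 'incoming',
    source: undefined,
    sourceUuid: undefined,
    isPlayed: false,
    messageIdForLogging: 'message-42',
    timestamp: 1677000000000,
  },
  previousMessageId: 'message-41',
  consecutiveVoiceNotes: [
    {
      id: 'message-43',
      url: undefined, // download still pending
      type: 'incoming',
      source: undefined,
      sourceUuid: undefined,
      isPlayed: false,
      messageIdForLogging: 'message-43',
      timestamp: 1677000060000,
    },
  ],
  playbackRate: 1,
  // timestamp of the first non-voice-note message after the queue
  nextMessageTimestamp: 1677000120000,
};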
export const selectVoiceNoteAndConsecutive = createSelector(
getConversations,
getSelectedConversationId,
getConversationByIdSelector,
getUserConversationId,
(
conversations,
selectedConversationId,
getConversationById,
ourConversationId
) => {
return (
messageId: string
): VoiceNoteAndConsecutiveForPlayback | undefined => {
const message = conversations.messagesLookup[messageId];
if (!message) {
log.warn('selectVoiceNoteData: message not found', {
message: messageId,
});
return;
}
const voiceNote = extractVoiceNoteForPlayback(message, ourConversationId);
if (!voiceNote) {
log.warn('selectVoiceNoteData: message not a voice note', {
message: messageId,
});
return undefined;
}
if (!selectedConversationId) {
log.warn('selectVoiceNoteData: no selected conversation id', {
message: messageId,
});
return undefined;
}
const conversationMessages =
conversations.messagesByConversation[selectedConversationId];
if (!conversationMessages) {
      log.warn('selectVoiceNoteData: no conversation messages', {
message: messageId,
});
return;
}
let idx = conversationMessages.messageIds.indexOf(messageId);
      // the id of the message just before this one; useful when inserting into an already-active queue
const previousMessageId = conversationMessages.messageIds[idx - 1];
const consecutiveVoiceNotes: Array<VoiceNoteForPlayback> = [];
let nextMessageId: string;
let nextMessage: MessageWithUIFieldsType | undefined;
let nextVoiceNote: VoiceNoteForPlayback | undefined;
do {
idx += 1;
nextMessageId = conversationMessages.messageIds[idx];
if (!nextMessageId) {
nextMessage = undefined;
break;
}
nextMessage = conversations.messagesLookup[nextMessageId];
if (!nextMessage) {
break;
}
if (nextMessage.deletedForEveryone) {
continue;
}
nextVoiceNote = extractVoiceNoteForPlayback(
nextMessage,
ourConversationId
);
if (nextVoiceNote) {
consecutiveVoiceNotes.push(nextVoiceNote);
}
} while (nextVoiceNote);
const conversation = getConversationById(selectedConversationId);
return {
conversationId: selectedConversationId,
voiceNote,
consecutiveVoiceNotes,
playbackRate: conversation?.voiceNotePlaybackRate ?? 1,
previousMessageId,
nextMessageTimestamp: nextMessage?.timestamp,
};
};
}
);

View file

@ -163,7 +163,7 @@ export function hasErrors(
}
export function getSource(
message: MessageWithUIFieldsType,
message: Pick<MessageAttributesType, 'type' | 'source'>,
ourNumber: string | undefined
): string | undefined {
if (isIncoming(message)) {
@ -195,7 +195,7 @@ export function getSourceDevice(
}
export function getSourceUuid(
message: MessageWithUIFieldsType,
message: Pick<MessageAttributesType, 'type' | 'sourceUuid'>,
ourACI: string | undefined
): string | undefined {
if (isIncoming(message)) {
@ -1554,13 +1554,16 @@ export function getPropsForEmbeddedContact(
return embeddedContactSelector(firstContact, {
regionCode,
getAbsoluteAttachmentPath:
window.Signal.Migrations.getAbsoluteAttachmentPath,
getAbsoluteAttachmentPath: getAttachmentUrlForPath,
firstNumber,
uuid: accountSelector(firstNumber),
});
}
export function getAttachmentUrlForPath(path: string): string {
return window.Signal.Migrations.getAbsoluteAttachmentPath(path);
}
export function getPropsForAttachment(
attachment: AttachmentType
): AttachmentType | undefined {
@ -1575,23 +1578,17 @@ export function getPropsForAttachment(
fileSize: size ? filesize(size) : undefined,
isVoiceMessage: isVoiceMessage(attachment),
pending,
url: path
? window.Signal.Migrations.getAbsoluteAttachmentPath(path)
: undefined,
url: path ? getAttachmentUrlForPath(path) : undefined,
screenshot: screenshot?.path
? {
...screenshot,
url: window.Signal.Migrations.getAbsoluteAttachmentPath(
screenshot.path
),
url: getAttachmentUrlForPath(screenshot.path),
}
: undefined,
thumbnail: thumbnail?.path
? {
...thumbnail,
url: window.Signal.Migrations.getAbsoluteAttachmentPath(
thumbnail.path
),
url: getAttachmentUrlForPath(thumbnail.path),
}
: undefined,
};
@ -1602,9 +1599,7 @@ function processQuoteAttachment(
): QuotedAttachmentType {
const { thumbnail } = attachment;
const path =
thumbnail &&
thumbnail.path &&
window.Signal.Migrations.getAbsoluteAttachmentPath(thumbnail.path);
thumbnail && thumbnail.path && getAttachmentUrlForPath(thumbnail.path);
const objectUrl = thumbnail && thumbnail.objectUrl;
const thumbnailWithObjectUrl =

View file

@ -15,6 +15,7 @@ import { getIntl } from '../selectors/user';
import { useConversationsActions } from '../ducks/conversations';
import { useGlobalModalActions } from '../ducks/globalModals';
import { useLightboxActions } from '../ducks/lightbox';
import { useAudioPlayerActions } from '../ducks/audioPlayer';
import {
getIsViewOnce,
getMedia,
@ -27,6 +28,7 @@ export function SmartLightbox(): JSX.Element | null {
const { saveAttachment } = useConversationsActions();
const { closeLightbox } = useLightboxActions();
const { toggleForwardMessageModal } = useGlobalModalActions();
const { pauseVoiceNotePlayer } = useAudioPlayerActions();
const conversationSelector = useSelector<StateType, GetConversationByIdType>(
getConversationSelector
@ -54,6 +56,7 @@ export function SmartLightbox(): JSX.Element | null {
saveAttachment={saveAttachment}
selectedIndex={selectedIndex || 0}
toggleForwardMessageModal={toggleForwardMessageModal}
onMediaPlaybackStart={pauseVoiceNotePlayer}
/>
);
}

View file

@ -1,35 +1,79 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { connect } from 'react-redux';
import { pick } from 'lodash';
import React, { useCallback } from 'react';
import { useSelector } from 'react-redux';
import { MessageAudio } from '../../components/conversation/MessageAudio';
import type { OwnProps as MessageAudioOwnProps } from '../../components/conversation/MessageAudio';
import { mapDispatchToProps } from '../actions';
import type { StateType } from '../reducer';
import type { ActiveAudioPlayerStateType } from '../ducks/audioPlayer';
import { useAudioPlayerActions } from '../ducks/audioPlayer';
import {
selectAudioPlayerActive,
selectVoiceNoteAndConsecutive,
} from '../selectors/audioPlayer';
import { useConversationsActions } from '../ducks/conversations';
import { getUserConversationId } from '../selectors/user';
import * as log from '../../logging/log';
export type Props = Omit<MessageAudioOwnProps, 'active'>;
export type Props = Omit<MessageAudioOwnProps, 'active' | 'onPlayMessage'> & {
renderingContext: string;
};
const mapStateToProps = (
state: StateType,
props: Props
): MessageAudioOwnProps => {
const { active } = state.audioPlayer;
export function SmartMessageAudio({
renderingContext,
...props
}: Props): JSX.Element | null {
const active = useSelector(selectAudioPlayerActive);
const { loadMessageAudio, setIsPlaying, setPlaybackRate, setCurrentTime } =
useAudioPlayerActions();
const { pushPanelForConversation } = useConversationsActions();
const getVoiceNoteData = useSelector(selectVoiceNoteAndConsecutive);
const ourConversationId = useSelector(getUserConversationId);
const messageActive: ActiveAudioPlayerStateType | undefined =
active &&
active.id === props.id &&
active.context === props.renderingContext
? pick(active, 'playing', 'playbackRate', 'currentTime', 'duration')
active.content &&
active.content.current.id === props.id &&
active.content.context === renderingContext
? active
: undefined;
return {
...props,
active: messageActive,
};
};
const smart = connect(mapStateToProps, mapDispatchToProps);
export const SmartMessageAudio = smart(MessageAudio);
const handlePlayMessage = useCallback(
(id: string, position: number) => {
const voiceNoteData = getVoiceNoteData(id);
if (!voiceNoteData) {
log.warn('SmartMessageAudio: voice note not found', {
message: id,
});
return;
}
if (!ourConversationId) {
log.warn('SmartMessageAudio: no ourConversationId');
return;
}
loadMessageAudio({
voiceNoteData,
position,
context: renderingContext,
ourConversationId,
});
},
[getVoiceNoteData, loadMessageAudio, ourConversationId, renderingContext]
);
return (
<MessageAudio
active={messageActive}
onPlayMessage={handlePlayMessage}
setPlaybackRate={setPlaybackRate}
setIsPlaying={setIsPlaying}
setCurrentTime={setCurrentTime}
pushPanelForConversation={pushPanelForConversation}
{...props}
/>
);
}

View file

@ -0,0 +1,83 @@
// Copyright 2022 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import React, { useCallback, useEffect } from 'react';
import { useSelector } from 'react-redux';
import { MiniPlayer, PlayerState } from '../../components/MiniPlayer';
import { usePrevious } from '../../hooks/usePrevious';
import { useAudioPlayerActions } from '../ducks/audioPlayer';
import {
selectAudioPlayerActive,
selectVoiceNoteTitle,
} from '../selectors/audioPlayer';
import { getIntl } from '../selectors/user';
/**
 * Wires state and dispatch into the MiniPlayer and shows/hides it.
 *
 * It also triggers side-effecting actions (actual playback) in response to
 * state changes.
*/
export function SmartMiniPlayer(): JSX.Element | null {
const i18n = useSelector(getIntl);
const active = useSelector(selectAudioPlayerActive);
const getVoiceNoteTitle = useSelector(selectVoiceNoteTitle);
const {
setIsPlaying,
setPlaybackRate,
unloadMessageAudio,
playMessageAudio,
} = useAudioPlayerActions();
const handlePlay = useCallback(() => setIsPlaying(true), [setIsPlaying]);
const handlePause = useCallback(() => setIsPlaying(false), [setIsPlaying]);
const previousContent = usePrevious(undefined, active?.content);
useEffect(() => {
if (!active) {
return;
}
const { content } = active;
// if no content, stop playing
if (!content) {
if (active.playing) {
setIsPlaying(false);
}
return;
}
// if the content changed, play the new content
if (content.current.id !== previousContent?.current.id) {
playMessageAudio(content.isConsecutive);
}
// if the start position changed, play at new position
if (content.startPosition !== previousContent?.startPosition) {
playMessageAudio(false);
}
});
if (!active?.content) {
return null;
}
let state = PlayerState.loading;
if (active.content.current.url) {
state = active.playing ? PlayerState.playing : PlayerState.paused;
}
return (
<MiniPlayer
i18n={i18n}
title={getVoiceNoteTitle(active.content.current)}
onPlay={handlePlay}
onPause={handlePause}
onPlaybackRate={setPlaybackRate}
onClose={unloadMessageAudio}
state={state}
currentTime={active.currentTime}
duration={active.duration}
playbackRate={active.playbackRate}
/>
);
}
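// Hypothetical host sketch, not part of this commit (the actual mount point is
// defined elsewhere). Because SmartMiniPlayer returns null while nothing is
// loaded, a host can render it unconditionally:
function ConversationPaneWithMiniPlayer(): JSX.Element {
  return (
    <>
      <SmartMiniPlayer />
      {/* ...the rest of the pane... */}
    </>
  );
}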

View file

@ -25,6 +25,7 @@ import { useConversationsActions } from '../ducks/conversations';
import { useGlobalModalActions } from '../ducks/globalModals';
import { useStoriesActions } from '../ducks/stories';
import { useToastActions } from '../ducks/toast';
import { useAudioPlayerActions } from '../ducks/audioPlayer';
function renderStoryCreator(): JSX.Element {
return <SmartStoryCreator />;
@ -66,6 +67,8 @@ export function SmartStories(): JSX.Element | null {
const hasViewReceiptSetting = useSelector(getHasStoryViewReceiptSetting);
const { pauseVoiceNotePlayer } = useAudioPlayerActions();
if (!isShowingStoriesView) {
return null;
}
@ -84,6 +87,7 @@ export function SmartStories(): JSX.Element | null {
saveAttachment(story.attachment, story.timestamp);
}
}}
onMediaPlaybackStart={pauseVoiceNotePlayer}
preferredWidthFromStorage={preferredWidthFromStorage}
renderStoryCreator={renderStoryCreator}
retryMessageSend={retryMessageSend}

View file

@ -40,6 +40,7 @@ import { useLinkPreviewActions } from '../ducks/linkPreviews';
import { useRecentEmojis } from '../selectors/emojis';
import { useStoriesActions } from '../ducks/stories';
import { useStoryDistributionListsActions } from '../ducks/storyDistributionLists';
import { useAudioPlayerActions } from '../ducks/audioPlayer';
export type PropsType = {
file?: File;
@ -91,6 +92,7 @@ export function SmartStoryCreator(): JSX.Element | null {
const skinTone = useSelector<StateType, number>(getEmojiSkinTone);
const { onSetSkinTone } = useItemsActions();
const { onUseEmoji } = useEmojisActions();
const { pauseVoiceNotePlayer } = useAudioPlayerActions();
return (
<StoryCreator
@ -122,6 +124,7 @@ export function SmartStoryCreator(): JSX.Element | null {
onSetSkinTone={onSetSkinTone}
onUseEmoji={onUseEmoji}
onViewersUpdated={updateStoryViewers}
onMediaPlaybackStart={pauseVoiceNotePlayer}
ourConversationId={ourConversationId}
processAttachment={processAttachment}
recentEmojis={recentEmojis}

View file

@ -35,6 +35,7 @@ import { useActions as useEmojisActions } from '../ducks/emojis';
import { useConversationsActions } from '../ducks/conversations';
import { useRecentEmojis } from '../selectors/emojis';
import { useActions as useItemsActions } from '../ducks/items';
import { useAudioPlayerActions } from '../ducks/audioPlayer';
import { useStoriesActions } from '../ducks/stories';
import { useIsWindowActive } from '../../hooks/useIsWindowActive';
@ -85,6 +86,8 @@ export function SmartStoryViewer(): JSX.Element | null {
getHasStoryViewReceiptSetting
);
const { pauseVoiceNotePlayer } = useAudioPlayerActions();
const storyInfo = getStoryById(
conversationSelector,
selectedStoryData.messageId
@ -134,6 +137,7 @@ export function SmartStoryViewer(): JSX.Element | null {
onSetSkinTone={onSetSkinTone}
onTextTooLong={() => showToast(ToastType.MessageBodyTooLong)}
onUseEmoji={onUseEmoji}
onMediaPlaybackStart={pauseVoiceNotePlayer}
preferredReactionEmoji={preferredReactionEmoji}
recentEmojis={recentEmojis}
renderEmojiPicker={renderEmojiPicker}

View file

@ -3,10 +3,10 @@
import { connect } from 'react-redux';
import { mapDispatchToProps } from '../actions';
import { GlobalAudioProvider } from '../../components/GlobalAudioContext';
import { VoiceNotesPlaybackProvider } from '../../components/VoiceNotesPlaybackContext';
import type { StateType } from '../reducer';
import { isPaused } from '../selectors/audioPlayer';
import { getSelectedConversationId } from '../selectors/conversations';
import { isPaused } from '../selectors/audioPlayer';
const mapStateToProps = (state: StateType) => {
return {
@ -17,4 +17,6 @@ const mapStateToProps = (state: StateType) => {
const smart = connect(mapStateToProps, mapDispatchToProps);
export const SmartGlobalAudioProvider = smart(GlobalAudioProvider);
export const SmartVoiceNotesPlaybackProvider = smart(
VoiceNotesPlaybackProvider
);

View file

@ -3,7 +3,7 @@
import type { ReactElement } from 'react';
import React from 'react';
import { GlobalAudioContext } from '../../components/GlobalAudioContext';
import { VoiceNotesPlaybackContext } from '../../components/VoiceNotesPlaybackContext';
import type { Props as MessageAudioProps } from './MessageAudio';
import { SmartMessageAudio } from './MessageAudio';
@ -13,14 +13,14 @@ export function renderAudioAttachment(
props: AudioAttachmentProps
): ReactElement {
return (
<GlobalAudioContext.Consumer>
{globalAudioProps => {
<VoiceNotesPlaybackContext.Consumer>
{voiceNotesPlaybackProps => {
return (
globalAudioProps && (
<SmartMessageAudio {...props} {...globalAudioProps} />
voiceNotesPlaybackProps && (
<SmartMessageAudio {...props} {...voiceNotesPlaybackProps} />
)
);
}}
</GlobalAudioContext.Consumer>
</VoiceNotesPlaybackContext.Consumer>
);
}

View file

@ -3,8 +3,6 @@
import { assert } from 'chai';
import type { SetMessageAudioAction } from '../../../state/ducks/audioPlayer';
import type { SelectedConversationChangedActionType } from '../../../state/ducks/conversations';
import {
SELECTED_CONVERSATION_CHANGED,
actions as conversationsActions,
@ -13,24 +11,35 @@ import { noopAction } from '../../../state/ducks/noop';
import type { StateType } from '../../../state/reducer';
import { reducer as rootReducer } from '../../../state/reducer';
import type { SelectedConversationChangedActionType } from '../../../state/ducks/conversations';
import { actions } from '../../../state/ducks/audioPlayer';
import type { VoiceNoteAndConsecutiveForPlayback } from '../../../state/selectors/audioPlayer';
const { messageDeleted, messageChanged } = conversationsActions;
const MESSAGE_ID = 'message-id';
// can't use the actual action since it's a ThunkAction
const setMessageAudio = (
id: string,
context: string
): SetMessageAudioAction => ({
type: 'audioPlayer/SET_MESSAGE_AUDIO',
payload: {
id,
context,
playbackRate: 1,
duration: 100,
function voiceNoteDataForMessage(
messageId: string
): VoiceNoteAndConsecutiveForPlayback {
return {
conversationId: 'convo',
voiceNote: {
id: messageId,
type: 'outgoing',
timestamp: 0,
url: undefined,
source: undefined,
sourceUuid: undefined,
messageIdForLogging: messageId,
isPlayed: false,
},
});
consecutiveVoiceNotes: [],
previousMessageId: undefined,
nextMessageTimestamp: undefined,
playbackRate: 1,
};
}
describe('both/state/ducks/audioPlayer', () => {
const getEmptyRootState = (): StateType => {
@ -39,26 +48,51 @@ describe('both/state/ducks/audioPlayer', () => {
const getInitializedState = (): StateType => {
const state = getEmptyRootState();
const updated = rootReducer(state, setMessageAudio(MESSAGE_ID, 'context'));
const updated = rootReducer(
state,
actions.loadMessageAudio({
voiceNoteData: voiceNoteDataForMessage(MESSAGE_ID),
position: 0,
context: 'context',
ourConversationId: 'convo',
})
);
assert.strictEqual(updated.audioPlayer.active?.id, MESSAGE_ID);
assert.strictEqual(updated.audioPlayer.active?.context, 'context');
assert.strictEqual(
updated.audioPlayer.active?.content?.current.id,
MESSAGE_ID
);
assert.strictEqual(updated.audioPlayer.active?.content?.context, 'context');
return updated;
};
describe('setActiveAudioID', () => {
it("updates `activeAudioID` in the audioPlayer's state", () => {
describe('loadMessageAudio', () => {
it("updates `active` in the audioPlayer's state", () => {
const state = getEmptyRootState();
assert.strictEqual(state.audioPlayer.active, undefined);
const updated = rootReducer(state, setMessageAudio('test', 'context'));
assert.strictEqual(updated.audioPlayer.active?.id, 'test');
assert.strictEqual(updated.audioPlayer.active?.context, 'context');
const updated = rootReducer(
state,
actions.loadMessageAudio({
voiceNoteData: voiceNoteDataForMessage('test'),
position: 0,
context: 'context',
ourConversationId: 'convo',
})
);
assert.strictEqual(
updated.audioPlayer.active?.content?.current.id,
'test'
);
assert.strictEqual(
updated.audioPlayer.active?.content?.context,
'context'
);
});
});
it('resets activeAudioID when changing the conversation', () => {
it('active is not changed when changing the conversation', () => {
const state = getInitializedState();
const updated = rootReducer(state, <SelectedConversationChangedActionType>{
@ -66,10 +100,13 @@ describe('both/state/ducks/audioPlayer', () => {
payload: { id: 'any' },
});
assert.strictEqual(updated.audioPlayer.active, undefined);
assert.strictEqual(
updated.audioPlayer.active?.content?.current.id,
MESSAGE_ID
);
});
it('resets activeAudioID when message was deleted', () => {
it('resets active.content when message was deleted', () => {
const state = getInitializedState();
const updated = rootReducer(
@ -77,10 +114,10 @@ describe('both/state/ducks/audioPlayer', () => {
messageDeleted(MESSAGE_ID, 'conversation-id')
);
assert.strictEqual(updated.audioPlayer.active, undefined);
assert.strictEqual(updated.audioPlayer.active?.content, undefined);
});
it('resets activeAudioID when message was erased', () => {
it('resets active.content when message is DOE', () => {
const state = getInitializedState();
const updated = rootReducer(
@ -97,6 +134,6 @@ describe('both/state/ducks/audioPlayer', () => {
})
);
assert.strictEqual(updated.audioPlayer.active, undefined);
assert.strictEqual(updated.audioPlayer.active?.content, undefined);
});
});

View file

@ -1963,9 +1963,9 @@ describe('calling duck', () => {
});
});
it("doesn't dispatch any actions for group calls", () => {
it("doesn't dispatch any actions for group calls", async () => {
const dispatch = sinon.spy();
startCall({
await startCall({
callMode: CallMode.Group,
conversationId: '123',
hasLocalAudio: true,

View file

@ -20,6 +20,7 @@ import type {
MessageType,
SelectedConversationChangedActionType,
ToggleConversationInChooseMembersActionType,
MessageChangedActionType,
} from '../../../state/ducks/conversations';
import {
SELECTED_CONVERSATION_CHANGED,
@ -57,6 +58,7 @@ import {
VIEWERS_CHANGED,
} from '../../../state/ducks/storyDistributionLists';
import { MY_STORY_ID } from '../../../types/Stories';
import type { MessageAttributesType } from '../../../model-types.d';
const {
clearGroupCreationError,
@ -67,7 +69,6 @@ const {
conversationStoppedByMissingVerification,
createGroup,
discardMessages,
messageChanged,
repairNewestMessage,
repairOldestMessage,
resetAllChatColors,
@ -86,6 +87,22 @@ const {
toggleConversationInChooseMembers,
} = actions;
// can't use messageChanged action creator because it's a ThunkAction
function messageChanged(
messageId: string,
conversationId: string,
data: MessageAttributesType
): MessageChangedActionType {
return {
type: 'MESSAGE_CHANGED',
payload: {
id: messageId,
conversationId,
data,
},
};
}
describe('both/state/ducks/conversations', () => {
const UUID_1 = UUID.generate().toString();
const UUID_2 = UUID.generate().toString();

View file

@ -2,25 +2,34 @@
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import type { SetMessageAudioAction } from '../../../state/ducks/audioPlayer';
import { noopAction } from '../../../state/ducks/noop';
import type { VoiceNoteAndConsecutiveForPlayback } from '../../../state/selectors/audioPlayer';
import { isPaused } from '../../../state/selectors/audioPlayer';
import { actions } from '../../../state/ducks/audioPlayer';
import type { StateType } from '../../../state/reducer';
import { reducer as rootReducer } from '../../../state/reducer';
// can't use the actual action since it's a ThunkAction
const setActiveAudioID = (
id: string,
context: string
): SetMessageAudioAction => ({
type: 'audioPlayer/SET_MESSAGE_AUDIO',
payload: {
id,
context,
playbackRate: 1,
duration: 100,
function voiceNoteDataForMessage(
messageId: string
): VoiceNoteAndConsecutiveForPlayback {
return {
conversationId: 'convo',
voiceNote: {
id: messageId,
type: 'outgoing',
timestamp: 0,
url: undefined,
source: undefined,
sourceUuid: undefined,
messageIdForLogging: messageId,
isPlayed: false,
},
});
consecutiveVoiceNotes: [],
previousMessageId: undefined,
nextMessageTimestamp: undefined,
playbackRate: 1,
};
}
describe('state/selectors/audioPlayer', () => {
const getEmptyRootState = (): StateType => {
@ -36,7 +45,15 @@ describe('state/selectors/audioPlayer', () => {
it('returns false if state.audioPlayer.active is not undefined', () => {
const state = getEmptyRootState();
const updated = rootReducer(state, setActiveAudioID('id', 'context'));
const updated = rootReducer(
state,
actions.loadMessageAudio({
voiceNoteData: voiceNoteDataForMessage('id'),
position: 0,
context: 'context',
ourConversationId: 'convo',
})
);
assert.isFalse(isPaused(updated));
});

View file

@ -0,0 +1,21 @@
// Copyright 2022 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
/**
 * Convert a duration in seconds to m:ss (or h:mm:ss once the duration reaches an hour) for display in an audio player
*/
export const durationToPlaybackText = (time: number): string => {
const hours = Math.floor(time / 3600);
let minutes = Math.floor((time % 3600) / 60).toString();
let seconds = Math.floor(time % 60).toString();
if (hours !== 0 && minutes.length < 2) {
minutes = `0${minutes}`;
}
if (seconds.length < 2) {
seconds = `0${seconds}`;
}
return hours ? `${hours}:${minutes}:${seconds}` : `${minutes}:${seconds}`;
};
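// Illustrative usage, not part of this commit; expected results shown in comments:
const underAMinute = durationToPlaybackText(5); // '0:05'
const underAnHour = durationToPlaybackText(75); // '1:15'
const overAnHour = durationToPlaybackText(3661); // '1:01:01' (minutes are zero-padded only once hours appear)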