New UI for audio playback and global audio player

Introduce new UI and behavior for playing audio attachments in
conversations. Previously, playback stopped unexpectedly during window
resizes and scrolling through the messages due to the row height
recomputation in `react-virtualized`.

With this commit we introduce `<GlobalAudioContext/>` instance that
wraps whole conversation and provides an `<audio/>` element that
doesn't get re-rendered (or destroyed) whenever `react-virtualized`
recomputes messages. The audio players (with a freshly designed UI) now
share this global `<audio/>` instance and manage access to it using
`audioPlayer.owner` state from Redux.

New UI computes on the fly, caches, and displays waveforms for each
audio attachment. Storybook had to be slightly modified to accommodate
testing of Android bubbles by introducing the new knob for
`authorColor`.
This commit is contained in:
Fedor Indutny 2021-03-10 12:36:58 -08:00 committed by Josh Perez
parent 1ca4960924
commit 12d7f24d0f
30 changed files with 1176 additions and 102 deletions

View file

@ -1301,6 +1301,24 @@ Signal Desktop makes use of the following open source projects.
licenses; we recommend you read them, as their terms may differ from the
terms above.
## lru-cache
The ISC License
Copyright (c) Isaac Z. Schlueter and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
## memoizee
ISC License

View file

@ -5042,5 +5042,17 @@
"cannotSelectContact": {
"message": "Cannot select contact",
"description": "The label for contact checkboxes that are disabled"
},
"MessageAudio--play": {
"message": "Play audio attachment",
"description": "Aria label for audio attachment's Play button"
},
"MessageAudio--pause": {
"message": "Pause audio attachment",
"description": "Aria label for audio attachment's Pause button"
},
"MessageAudio--slider": {
"message": "Playback time of audio attachment",
"description": "Aria label for audio attachment's playback time slider"
}
}

View file

@ -0,0 +1 @@
<svg width="20" height="20" fill="none" xmlns="http://www.w3.org/2000/svg"><path d="M7.917 1.667H3.75a.417.417 0 0 0-.417.416v15.834c0 .23.187.416.417.416h4.167c.23 0 .416-.186.416-.416V2.083a.417.417 0 0 0-.416-.416zM16.25 1.667h-4.167a.417.417 0 0 0-.416.416v15.834c0 .23.186.416.416.416h4.167c.23 0 .417-.186.417-.416V2.083a.417.417 0 0 0-.417-.416z" fill="#000"/></svg>

After

Width:  |  Height:  |  Size: 373 B

View file

@ -0,0 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" width="20" height="20"><path d="M17.247 9.248a.8.8 0 0 1 0 1.5L4.3 18.248c-.714.414-1.3.077-1.3-.748v-15c0-.825.584-1.162 1.3-.748z"/></svg>

After

Width:  |  Height:  |  Size: 180 B

View file

@ -93,8 +93,10 @@
"intl-tel-input": "12.1.15",
"jquery": "3.5.0",
"js-yaml": "3.13.1",
"libsignal-client": "https://github.com/signalapp/libsignal-client-node.git#f10fbd04eb6efb396eb12c25429761e8785dc9d0",
"linkify-it": "2.2.0",
"lodash": "4.17.20",
"lru-cache": "6.0.0",
"memoizee": "0.4.14",
"mkdirp": "0.5.2",
"moment": "2.21.0",
@ -147,8 +149,7 @@
"underscore": "1.9.0",
"uuid": "3.3.2",
"websocket": "1.0.28",
"zkgroup": "https://github.com/signalapp/signal-zkgroup-node.git#2d7db946cc88492b65cc66e9aa9de0c9e664fd8d",
"libsignal-client": "https://github.com/signalapp/libsignal-client-node.git#f10fbd04eb6efb396eb12c25429761e8785dc9d0"
"zkgroup": "https://github.com/signalapp/signal-zkgroup-node.git#2d7db946cc88492b65cc66e9aa9de0c9e664fd8d"
},
"devDependencies": {
"@babel/core": "7.7.7",
@ -178,6 +179,7 @@
"@types/linkify-it": "2.1.0",
"@types/lodash": "4.14.106",
"@types/long": "4.0.1",
"@types/lru-cache": "5.1.0",
"@types/memoizee": "0.4.2",
"@types/mkdirp": "0.5.2",
"@types/mocha": "5.0.0",

View file

@ -686,18 +686,6 @@
cursor: pointer;
}
.module-message__audio-attachment {
margin-top: 2px;
}
.module-message__audio-attachment--with-content-below {
margin-bottom: 5px;
}
.module-message__audio-attachment--with-content-above {
margin-top: 6px;
}
.module-message__generic-attachment {
@include button-reset;

View file

@ -30,13 +30,16 @@ $color-black: #000000;
$color-white-alpha-20: rgba($color-white, 0.2);
$color-white-alpha-40: rgba($color-white, 0.4);
$color-white-alpha-60: rgba($color-white, 0.6);
$color-white-alpha-70: rgba($color-white, 0.7);
$color-white-alpha-80: rgba($color-white, 0.8);
$color-white-alpha-90: rgba($color-white, 0.9);
$color-black-alpha-05: rgba($color-black, 0.05);
$color-black-alpha-20: rgba($color-black, 0.2);
$color-black-alpha-40: rgba($color-black, 0.4);
$color-black-alpha-50: rgba($color-black, 0.5);
$color-black-alpha-60: rgba($color-black, 0.6);
$color-black-alpha-80: rgba($color-black, 0.8);
$ultramarine-brand-light: #3a76f0;
$ultramarine-brand-dark: #1851b4;

View file

@ -0,0 +1,282 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
// Row layout for the audio attachment: [button] [waveform] [duration].
.module-message__audio-attachment {
  display: flex;
  flex-direction: row;
  align-items: center;
  margin-top: 2px;
}

/* The separator between audio and text */
.module-message__audio-attachment--with-content-below {
  border-bottom: 1px solid $color-white-alpha-20;
  padding-bottom: 12px;
  margin-bottom: 7px;

  // Android bubbles keep the same separator color in light and dark themes.
  .module-message__audio-attachment--incoming & {
    @mixin android {
      border-color: $color-white-alpha-20;
    }
    @include light-theme {
      @include android;
    }
    @include dark-theme {
      @include android;
    }
    @include ios-theme {
      border-color: $color-black-alpha-20;
    }
    @include ios-dark-theme {
      border-color: $color-white-alpha-20;
    }
  }

  // NOTE(review): this selector targets `.module-message__container--outgoing`
  // while every other outgoing rule in this file targets
  // `.module-message__audio-attachment--outgoing` — confirm which ancestor
  // class is intended.
  .module-message__container--outgoing & {
    @mixin ios {
      border-color: $color-white-alpha-20;
    }
    @include light-theme {
      border-color: $color-black-alpha-20;
    }
    @include dark-theme {
      border-color: $color-white-alpha-20;
    }
    @include ios-theme {
      @include ios;
    }
    @include ios-dark-theme {
      @include ios;
    }
  }
}

.module-message__audio-attachment--with-content-above {
  margin-top: 6px;
}

// Round Play/Pause toggle button. The glyph is rendered via `::before` so the
// same element can swap between the play and pause icons by changing the
// `--play`/`--pause` modifier class.
.module-message__audio-attachment__button {
  flex-shrink: 0;
  width: 36px;
  height: 36px;

  @include button-reset;

  outline: none;
  border-radius: 18px;

  &::before {
    display: block;
    height: 100%;
    content: '';
  }

  // Tints `<name>-solid-20.svg` with `$color` for the matching modifier.
  @mixin audio-icon($name, $color) {
    &--#{$name}::before {
      @include color-svg(
        '../images/icons/v2/#{$name}-solid-20.svg',
        $color,
        false
      );
    }
  }

  .module-message__audio-attachment--incoming & {
    @mixin android {
      background: $color-white-alpha-20;
      @include audio-icon(play, $color-white);
      @include audio-icon(pause, $color-white);
    }
    @include light-theme {
      @include android;
    }
    @include dark-theme {
      @include android;
    }
    @include ios-theme {
      background: $color-white;
      @include audio-icon(play, $color-gray-60);
      @include audio-icon(pause, $color-gray-60);
    }
    @include ios-dark-theme {
      background: $color-gray-60;
      @include audio-icon(play, $color-gray-15);
      @include audio-icon(pause, $color-gray-15);
    }
  }

  .module-message__audio-attachment--outgoing & {
    @mixin android {
      background: $color-white;
      @include audio-icon(play, $color-gray-60);
      @include audio-icon(pause, $color-gray-60);
    }
    @mixin ios {
      background: $color-white-alpha-20;
      @include audio-icon(play, $color-white);
      @include audio-icon(pause, $color-white);
    }
    @include light-theme {
      @include android;
    }
    @include dark-theme {
      @include android;
    }
    @include ios-theme {
      @include ios;
    }
    @include ios-dark-theme {
      @include ios;
    }
  }
}
.module-message__audio-attachment__waveform {
  flex-shrink: 0;
  margin-left: 12px;
  display: flex;
  align-items: center;
  outline: 0;
}

// A single 2px waveform bar. Its height is set inline from the computed audio
// peaks; height/background changes animate via the transition below.
.module-message__audio-attachment__waveform__bar {
  display: inline-block;
  width: 2px;
  border-radius: 2px;
  transition: height 250ms, background 250ms;

  &:not(:first-of-type) {
    margin-left: 2px;
  }

  // `--active` marks bars left of the playback head.
  .module-message__audio-attachment--incoming & {
    @mixin android {
      background: $color-white-alpha-40;
      &--active {
        background: $color-white-alpha-80;
      }
    }
    @include light-theme {
      @include android;
    }
    @include dark-theme {
      @include android;
    }
    @include ios-theme {
      background: $color-black-alpha-40;
      &--active {
        background: $color-black-alpha-80;
      }
    }
    @include ios-dark-theme {
      background: $color-white-alpha-40;
      &--active {
        background: $color-white-alpha-70;
      }
    }
  }

  .module-message__audio-attachment--outgoing & {
    @mixin ios {
      background: $color-white-alpha-40;
      &--active {
        background: $color-white-alpha-80;
      }
    }
    @include light-theme {
      background: $color-black-alpha-20;
      &--active {
        background: $color-black-alpha-50;
      }
    }
    @include dark-theme {
      background: $color-white-alpha-40;
      &--active {
        background: $color-white-alpha-80;
      }
    }
    @include ios-theme {
      @include ios;
    }
    @include ios-dark-theme {
      @include ios;
    }
  }
}

.module-message__audio-attachment__duration {
  flex-shrink: 1;
  margin-left: 12px;

  @include font-caption;

  .module-message__audio-attachment--incoming & {
    @mixin android {
      color: $color-white-alpha-80;
    }
    @include light-theme {
      @include android;
    }
    @include dark-theme {
      @include android;
    }
    @include ios-theme {
      color: $color-black-alpha-60;
    }
    @include ios-dark-theme {
      color: $color-white-alpha-80;
    }
  }

  .module-message__audio-attachment--outgoing & {
    @mixin ios {
      color: $color-white-alpha-80;
    }
    @include light-theme {
      color: $color-gray-60;
    }
    @include dark-theme {
      color: $color-white-alpha-80;
    }
    @include ios-theme {
      @include ios;
    }
    @include ios-dark-theme {
      @include ios;
    }
  }
}

@media (min-width: 0px) and (max-width: 799px) {
  .module-message__audio-attachment__waveform {
    margin-left: 4px;
  }

  /* Clip the duration text when it is too long on small screens */
  .module-message__audio-attachment__duration {
    margin-left: 4px;
    max-width: 46px;
    white-space: nowrap;
    overflow: hidden;
    text-overflow: ellipsis;
  }
}

View file

@ -36,3 +36,4 @@
@import './components/EditConversationAttributesModal.scss';
@import './components/GroupDialog.scss';
@import './components/GroupTitleInput.scss';
@import './components/MessageAudio.scss';

View file

@ -0,0 +1,78 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import * as React from 'react';
import LRU from 'lru-cache';
import { WaveformCache } from '../types/Audio';
// Upper bound on cached waveforms (one entry per attachment URL) in the
// shared LRU below.
const MAX_WAVEFORM_COUNT = 1000;

type Contents = {
  audio: HTMLAudioElement;
  audioContext: AudioContext;
  waveformCache: WaveformCache;
};

// Carries the shared audio machinery; `null` outside of a
// <GlobalAudioProvider/>.
export const GlobalAudioContext = React.createContext<Contents | null>(null);

export type GlobalAudioProps = {
  conversationId: string;
  children?: React.ReactNode | React.ReactChildren;
};

/**
 * A global context that holds Audio, AudioContext, LRU instances that are used
 * inside the conversation by ts/components/conversation/MessageAudio.tsx
 */
export const GlobalAudioProvider: React.FC<GlobalAudioProps> = ({
  conversationId,
  children,
}) => {
  // One shared <audio/> per conversation. Keyed on `conversationId` only, so
  // message-row re-renders do not recreate it (and thus do not stop playback).
  const audio = React.useMemo(() => {
    window.log.info(
      'GlobalAudioProvider: re-generating audio for',
      conversationId
    );
    return new Audio();
  }, [conversationId]);

  // NOTE: the number of active audio contexts is limited per tab/window
  // See: https://developer.mozilla.org/en-US/docs/Web/API/AudioContext/AudioContext#google_chrome
  const audioContext = React.useMemo(() => {
    window.log.info('Instantiating new audio context');
    return new AudioContext();
  }, []);

  // Decoded-waveform cache shared by all MessageAudio instances; bounded so
  // it cannot grow without limit.
  const waveformCache: WaveformCache = React.useMemo(() => {
    return new LRU({
      max: MAX_WAVEFORM_COUNT,
    });
  }, []);

  // When moving between conversations - stop audio
  React.useEffect(() => {
    return () => {
      audio.pause();
    };
  }, [audio, conversationId]);

  // Close the AudioContext on teardown to release its per-tab slot (see the
  // NOTE above about the per-window context limit).
  React.useEffect(() => {
    return () => {
      window.log.info('Closing old audio context');
      audioContext.close();
    };
  }, [audioContext]);

  const value = {
    audio,
    audioContext,
    waveformCache,
  };

  return (
    <GlobalAudioContext.Provider value={value}>
      {children}
    </GlobalAudioContext.Provider>
  );
};

View file

@ -3,14 +3,16 @@
import * as React from 'react';
import { isBoolean } from 'lodash';
import LRU from 'lru-cache';
import { action } from '@storybook/addon-actions';
import { boolean, number, text } from '@storybook/addon-knobs';
import { boolean, number, text, select } from '@storybook/addon-knobs';
import { storiesOf } from '@storybook/react';
import { Colors } from '../../types/Colors';
import { WaveformCache } from '../../types/Audio';
import { EmojiPicker } from '../emoji/EmojiPicker';
import { Message, Props } from './Message';
import { Message, Props, AudioAttachmentProps } from './Message';
import {
AUDIO_MP3,
IMAGE_JPEG,
@ -19,6 +21,7 @@ import {
MIMEType,
VIDEO_MP4,
} from '../../types/MIME';
import { MessageAudio } from './MessageAudio';
import { setup as setupI18n } from '../../../js/modules/i18n';
import enMessages from '../../../_locales/en/messages.json';
import { pngUrl } from '../../storybook/Fixtures';
@ -42,10 +45,35 @@ const renderEmojiPicker: Props['renderEmojiPicker'] = ({
/>
);
// Storybook stand-in for the app's GlobalAudioContext + redux wiring: creates
// throwaway Audio/AudioContext/waveform-cache instances and tracks the active
// audio id in local component state instead.
const MessageAudioContainer: React.FC<AudioAttachmentProps> = props => {
  const [activeAudioID, setActiveAudioID] = React.useState<string | undefined>(
    undefined
  );

  const audio = React.useMemo(() => new Audio(), []);
  const audioContext = React.useMemo(() => new AudioContext(), []);
  const waveformCache: WaveformCache = React.useMemo(() => new LRU(), []);

  return (
    <MessageAudio
      {...props}
      id="storybook"
      audio={audio}
      audioContext={audioContext}
      waveformCache={waveformCache}
      setActiveAudioID={setActiveAudioID}
      activeAudioID={activeAudioID}
    />
  );
};

// Adapter matching the `Props['renderAudioAttachment']` render-prop shape.
const renderAudioAttachment: Props['renderAudioAttachment'] = props => (
  <MessageAudioContainer {...props} />
);
const createProps = (overrideProps: Partial<Props> = {}): Props => ({
attachments: overrideProps.attachments,
authorId: overrideProps.authorId || 'some-id',
authorColor: overrideProps.authorColor || 'blue',
authorColor: select('authorColor', Colors, Colors[0]),
authorAvatarPath: overrideProps.authorAvatarPath,
authorTitle: text('authorTitle', overrideProps.authorTitle || ''),
bodyRanges: overrideProps.bodyRanges,
@ -89,6 +117,7 @@ const createProps = (overrideProps: Partial<Props> = {}): Props => ({
reactions: overrideProps.reactions,
reactToMessage: action('reactToMessage'),
renderEmojiPicker,
renderAudioAttachment,
replyToMessage: action('replyToMessage'),
retrySend: action('retrySend'),
scrollToQuotedMessage: action('scrollToQuotedMessage'),

View file

@ -79,6 +79,17 @@ export type DirectionType = typeof Directions[number];
export const ConversationTypes = ['direct', 'group'] as const;
export type ConversationTypesType = typeof ConversationTypes[number];
export type AudioAttachmentProps = {
id: string;
i18n: LocalizerType;
buttonRef: React.RefObject<HTMLButtonElement>;
direction: DirectionType;
theme: ThemeType | undefined;
url: string;
withContentAbove: boolean;
withContentBelow: boolean;
};
export type PropsData = {
id: string;
conversationId: string;
@ -136,6 +147,8 @@ export type PropsData = {
isBlocked: boolean;
isMessageRequestAccepted: boolean;
bodyRanges?: BodyRangesType;
renderAudioAttachment: (props: AudioAttachmentProps) => JSX.Element;
};
export type PropsHousekeeping = {
@ -219,10 +232,10 @@ const EXPIRED_DELAY = 600;
export class Message extends React.PureComponent<Props, State> {
public menuTriggerRef: Trigger | undefined;
public audioRef: React.RefObject<HTMLAudioElement> = React.createRef();
public focusRef: React.RefObject<HTMLDivElement> = React.createRef();
public audioButtonRef: React.RefObject<HTMLButtonElement> = React.createRef();
public reactionsContainerRef: React.RefObject<
HTMLDivElement
> = React.createRef();
@ -676,6 +689,8 @@ export class Message extends React.PureComponent<Props, State> {
isSticker,
text,
theme,
renderAudioAttachment,
} = this.props;
const { imageBroken } = this.state;
@ -740,24 +755,16 @@ export class Message extends React.PureComponent<Props, State> {
);
}
if (!firstAttachment.pending && isAudio(attachments)) {
return (
<audio
ref={this.audioRef}
controls
className={classNames(
'module-message__audio-attachment',
withContentBelow
? 'module-message__audio-attachment--with-content-below'
: null,
withContentAbove
? 'module-message__audio-attachment--with-content-above'
: null
)}
key={firstAttachment.url}
>
<source src={firstAttachment.url} />
</audio>
);
return renderAudioAttachment({
i18n,
buttonRef: this.audioButtonRef,
id,
direction,
theme,
url: firstAttachment.url,
withContentAbove,
withContentBelow,
});
}
const { pending, fileName, fileSize, contentType } = firstAttachment;
const extension = getExtensionForDisplay({ contentType, fileName });
@ -2043,17 +2050,13 @@ export class Message extends React.PureComponent<Props, State> {
if (
!isAttachmentPending &&
isAudio(attachments) &&
this.audioRef &&
this.audioRef.current
this.audioButtonRef &&
this.audioButtonRef.current
) {
event.preventDefault();
event.stopPropagation();
if (this.audioRef.current.paused) {
this.audioRef.current.play();
} else {
this.audioRef.current.pause();
}
this.audioButtonRef.current.click();
}
if (contact && contact.signalAccount) {

View file

@ -0,0 +1,451 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import React, { useRef, useEffect, useState } from 'react';
import classNames from 'classnames';
import { noop } from 'lodash';
import { assert } from '../../util/assert';
import { LocalizerType } from '../../types/Util';
import { WaveformCache } from '../../types/Audio';
export type Props = {
  direction?: 'incoming' | 'outgoing';
  id: string;
  i18n: LocalizerType;
  url: string;
  withContentAbove: boolean;
  withContentBelow: boolean;

  // See: GlobalAudioContext.tsx
  audio: HTMLAudioElement;
  audioContext: AudioContext;
  waveformCache: WaveformCache;

  // Ref forwarded from Message so keyboard shortcuts can trigger the button.
  buttonRef: React.RefObject<HTMLButtonElement>;

  // Ownership of the shared <audio/>: this instance is "active" when
  // `activeAudioID === id`.
  activeAudioID: string | undefined;
  setActiveAudioID: (id: string | undefined) => void;
};
// Parameters for `loadAudio` below.
type LoadAudioOptions = {
  audioContext: AudioContext;
  waveformCache: WaveformCache;
  url: string;
};

// Decoded duration (seconds) plus normalized RMS peaks in [0, 1].
type LoadAudioResult = {
  duration: number;
  peaks: ReadonlyArray<number>;
};

// Constants

const CSS_BASE = 'module-message__audio-attachment';
// Number of bars rendered in the waveform.
const PEAK_COUNT = 47;
// Bar heights in px; the peak value interpolates between them.
const BAR_MIN_HEIGHT = 4;
const BAR_MAX_HEIGHT = 20;

// Increments for keyboard audio seek (in seconds)
const SMALL_INCREMENT = 1;
const BIG_INCREMENT = 5;
// Utils
// Formats a playback position/duration (seconds) as `m:ss`, or `h:mm:ss`
// once it reaches an hour. Minutes are only zero-padded when hours are shown.
const timeToText = (time: number): string => {
  const hours = Math.floor(time / 3600);
  const minutes = Math.floor((time % 3600) / 60);
  const paddedSeconds = String(Math.floor(time % 60)).padStart(2, '0');

  if (hours === 0) {
    return `${minutes}:${paddedSeconds}`;
  }
  return `${hours}:${String(minutes).padStart(2, '0')}:${paddedSeconds}`;
};
/**
 * Load audio from `url`, decode PCM data, and compute RMS peaks for displaying
 * the waveform.
 *
 * The results are cached in the `waveformCache` which is shared across
 * messages in the conversation and provided by GlobalAudioContext.
 *
 * @param options - audio context, shared waveform cache, and attachment URL.
 * @returns `PEAK_COUNT` peaks normalized to [0, 1] plus the clip duration in
 *   seconds.
 */
// TODO(indutny): move this to GlobalAudioContext and limit the concurrency.
// see DESKTOP-1267
async function loadAudio(options: LoadAudioOptions): Promise<LoadAudioResult> {
  const { audioContext, waveformCache, url } = options;

  const existing = waveformCache.get(url);
  if (existing) {
    window.log.info('MessageAudio: waveform cache hit', url);
    return Promise.resolve(existing);
  }
  window.log.info('MessageAudio: waveform cache miss', url);

  // Load and decode `url` into a raw PCM
  const response = await fetch(url);
  const raw = await response.arrayBuffer();

  const data = await audioContext.decodeAudioData(raw);

  // Compute RMS peaks
  const peaks = new Array(PEAK_COUNT).fill(0);
  const norms = new Array(PEAK_COUNT).fill(0);

  // Each peak accumulates ~data.length / PEAK_COUNT samples from every
  // channel; `norms` counts the contributions for the average below.
  const samplesPerPeak = data.length / peaks.length;
  for (
    let channelNum = 0;
    channelNum < data.numberOfChannels;
    channelNum += 1
  ) {
    const channel = data.getChannelData(channelNum);

    for (let sample = 0; sample < channel.length; sample += 1) {
      const i = Math.floor(sample / samplesPerPeak);
      peaks[i] += channel[sample] ** 2;
      norms[i] += 1;
    }
  }

  // Average
  // `max` starts at a tiny epsilon so the normalization below never divides
  // by zero on silent audio.
  let max = 1e-23;
  for (let i = 0; i < peaks.length; i += 1) {
    peaks[i] = Math.sqrt(peaks[i] / Math.max(1, norms[i]));
    max = Math.max(max, peaks[i]);
  }

  // Normalize
  for (let i = 0; i < peaks.length; i += 1) {
    peaks[i] /= max;
  }

  const result = { peaks, duration: data.duration };
  waveformCache.set(url, result);
  return result;
}
/**
 * Display message audio attachment along with its waveform, duration, and
 * toggle Play/Pause button.
 *
 * The waveform is computed off the renderer thread by AudioContext, but it is
 * still quite expensive, so we cache it in the `waveformCache` LRU cache.
 *
 * A global audio player is used for playback and access is managed by the
 * `activeAudioID` property. Whenever `activeAudioID` property is equal to `id`
 * the instance of the `MessageAudio` assumes the ownership of the `Audio`
 * instance and fully manages it.
 */
export const MessageAudio: React.FC<Props> = (props: Props) => {
  const {
    i18n,
    id,
    direction,
    url,
    withContentAbove,
    withContentBelow,

    buttonRef,
    audio,
    audioContext,
    waveformCache,

    activeAudioID,
    setActiveAudioID,
  } = props;

  assert(audio !== null, 'GlobalAudioContext always provides audio');

  // This instance owns the shared <audio/> only while its id is active.
  const isActive = activeAudioID === id;

  const waveformRef = useRef<HTMLDivElement | null>(null);
  const [isPlaying, setIsPlaying] = useState(isActive && !audio.paused);
  const [currentTime, setCurrentTime] = useState(
    isActive ? audio.currentTime : 0
  );

  // NOTE: Avoid division by zero
  const [duration, setDuration] = useState(1e-23);

  const [isLoading, setIsLoading] = useState(true);
  const [peaks, setPeaks] = useState<ReadonlyArray<number>>(
    new Array(PEAK_COUNT).fill(0)
  );

  // This effect loads audio file and computes its RMS peak for displaying the
  // waveform.
  useEffect(() => {
    if (!isLoading) {
      return noop;
    }

    let canceled = false;

    (async () => {
      try {
        const { peaks: newPeaks, duration: newDuration } = await loadAudio({
          audioContext,
          waveformCache,
          url,
        });
        if (canceled) {
          return;
        }
        setPeaks(newPeaks);
        setDuration(Math.max(newDuration, 1e-23));
      } catch (err) {
        window.log.error('MessageAudio: loadAudio error', err);
      } finally {
        if (!canceled) {
          setIsLoading(false);
        }
      }
    })();

    return () => {
      canceled = true;
    };
  }, [url, isLoading, setPeaks, setDuration, audioContext, waveformCache]);

  // This effect attaches/detaches event listeners to the global <audio/>
  // instance that we reuse from the GlobalAudioContext.
  //
  // Audio playback changes `audio.currentTime` so we have to propagate this
  // to the waveform UI.
  //
  // When audio ends - we have to change state and reset the position of the
  // waveform.
  useEffect(() => {
    // Owner of Audio instance changed
    if (!isActive) {
      window.log.info('MessageAudio: pausing old owner', id);
      setIsPlaying(false);
      setCurrentTime(0);
      return noop;
    }

    const onTimeUpdate = () => {
      setCurrentTime(audio.currentTime);
    };

    const onEnded = () => {
      window.log.info('MessageAudio: ended, changing UI', id);
      setIsPlaying(false);
      setCurrentTime(0);
    };

    const onLoadedMetadata = () => {
      assert(
        !Number.isNaN(audio.duration),
        'Audio should have definite duration on `loadedmetadata` event'
      );

      window.log.info('MessageAudio: `loadedmetadata` event', id);

      // Sync-up audio's time in case if <audio/> loaded its source after
      // user clicked on waveform
      audio.currentTime = currentTime;
    };

    audio.addEventListener('timeupdate', onTimeUpdate);
    audio.addEventListener('ended', onEnded);
    audio.addEventListener('loadedmetadata', onLoadedMetadata);

    return () => {
      audio.removeEventListener('timeupdate', onTimeUpdate);
      audio.removeEventListener('ended', onEnded);
      audio.removeEventListener('loadedmetadata', onLoadedMetadata);
    };
  }, [id, audio, isActive, currentTime]);

  // This effect detects `isPlaying` changes and starts/pauses playback when
  // needed (+keeps waveform position and audio position in sync).
  useEffect(() => {
    if (!isActive) {
      return;
    }

    if (isPlaying) {
      if (!audio.paused) {
        return;
      }
      window.log.info('MessageAudio: resuming playback for', id);
      audio.currentTime = currentTime;
      audio.play().catch(error => {
        window.log.info('MessageAudio: resume error', id, error.stack || error);
      });
    } else {
      window.log.info('MessageAudio: pausing playback for', id);
      audio.pause();
    }
  }, [id, audio, isActive, isPlaying, currentTime]);

  // Toggle playback; on first play this instance takes over the shared
  // <audio/> element and points it at this attachment's URL.
  const toggleIsPlaying = () => {
    setIsPlaying(!isPlaying);

    if (!isActive && !isPlaying) {
      window.log.info('MessageAudio: changing owner', id);
      setActiveAudioID(id);

      // Pause old audio
      if (!audio.paused) {
        audio.pause();
      }
      audio.src = url;
    }
  };

  // Clicking button toggle playback
  const onClick = (event: React.MouseEvent) => {
    event.stopPropagation();
    event.preventDefault();
    toggleIsPlaying();
  };

  // Keyboard playback toggle.
  // FIX: for the space bar `KeyboardEvent.key` is a single space (' '), not
  // 'Space' (that is `KeyboardEvent.code`), so the previous check never
  // matched the space key.
  const onKeyDown = (event: React.KeyboardEvent) => {
    if (event.key !== 'Enter' && event.key !== ' ') {
      return;
    }
    event.stopPropagation();
    event.preventDefault();
    toggleIsPlaying();
  };

  // Clicking waveform moves playback head position and starts playback.
  const onWaveformClick = (event: React.MouseEvent) => {
    event.preventDefault();
    event.stopPropagation();

    if (!isPlaying) {
      toggleIsPlaying();
    }

    if (!waveformRef.current) {
      return;
    }

    const boundingRect = waveformRef.current.getBoundingClientRect();
    const progress = (event.pageX - boundingRect.left) / boundingRect.width;

    if (isPlaying && !Number.isNaN(audio.duration)) {
      audio.currentTime = audio.duration * progress;
    } else {
      setCurrentTime(duration * progress);
    }
  };

  // Keyboard navigation for waveform. Pressing keys moves playback head
  // forward/backwards.
  const onWaveformKeyDown = (event: React.KeyboardEvent) => {
    let increment: number;
    if (event.key === 'ArrowRight' || event.key === 'ArrowUp') {
      increment = +SMALL_INCREMENT;
    } else if (event.key === 'ArrowLeft' || event.key === 'ArrowDown') {
      increment = -SMALL_INCREMENT;
    } else if (event.key === 'PageUp') {
      increment = +BIG_INCREMENT;
    } else if (event.key === 'PageDown') {
      increment = -BIG_INCREMENT;
    } else {
      // We don't handle other keys
      return;
    }

    event.preventDefault();
    event.stopPropagation();

    // There is no audio to rewind
    if (!isActive) {
      return;
    }

    audio.currentTime = Math.min(
      Number.isNaN(audio.duration) ? Infinity : audio.duration,
      Math.max(0, audio.currentTime + increment)
    );

    if (!isPlaying) {
      toggleIsPlaying();
    }
  };

  // FIX: the aria label describes the action the button performs — pause
  // while playing, play while paused. Previously these were inverted (the
  // button's `--pause`/`--play` class already followed this convention).
  const buttonLabel = i18n(
    isPlaying ? 'MessageAudio--pause' : 'MessageAudio--play'
  );

  const peakPosition = peaks.length * (currentTime / duration);

  return (
    <div
      className={classNames(
        CSS_BASE,
        `${CSS_BASE}--${direction}`,
        withContentBelow ? `${CSS_BASE}--with-content-below` : null,
        withContentAbove ? `${CSS_BASE}--with-content-above` : null
      )}
    >
      <button
        type="button"
        className={classNames(
          `${CSS_BASE}__button`,
          `${CSS_BASE}__button--${isPlaying ? 'pause' : 'play'}`
        )}
        ref={buttonRef}
        onClick={onClick}
        onKeyDown={onKeyDown}
        tabIndex={0}
        aria-label={buttonLabel}
      />
      <div
        ref={waveformRef}
        className={`${CSS_BASE}__waveform`}
        onClick={onWaveformClick}
        onKeyDown={onWaveformKeyDown}
        tabIndex={0}
        role="slider"
        aria-label={i18n('MessageAudio--slider')}
        aria-orientation="horizontal"
        aria-valuenow={currentTime}
        aria-valuemin={0}
        aria-valuemax={duration}
        aria-valuetext={timeToText(currentTime)}
      >
        {peaks.map((peak, i) => {
          let height = Math.max(BAR_MIN_HEIGHT, BAR_MAX_HEIGHT * peak);
          const highlight = i < peakPosition;

          // Use maximum height for current audio position
          if (highlight && i + 1 >= peakPosition) {
            height = BAR_MAX_HEIGHT;
          }

          const key = i;

          return (
            <div
              className={classNames([
                `${CSS_BASE}__waveform__bar`,
                highlight ? `${CSS_BASE}__waveform__bar--active` : null,
              ])}
              key={key}
              style={{ height }}
            />
          );
        })}
      </div>
      <div className={`${CSS_BASE}__duration`}>{timeToText(duration)}</div>
    </div>
  );
};

View file

@ -41,6 +41,7 @@ const defaultMessage: MessageProps = {
previews: [],
reactToMessage: () => null,
renderEmojiPicker: () => <div />,
renderAudioAttachment: () => <div>*AudioAttachment*</div>,
replyToMessage: () => null,
retrySend: () => null,
scrollToQuotedMessage: () => null,

View file

@ -44,6 +44,7 @@ const defaultMessageProps: MessagesProps = {
previews: [],
reactToMessage: () => null,
renderEmojiPicker: () => <div />,
renderAudioAttachment: () => <div>*AudioAttachment*</div>,
replyToMessage: () => null,
retrySend: () => null,
scrollToQuotedMessage: () => null,

View file

@ -269,6 +269,7 @@ const renderItem = (id: string) => (
conversationId=""
conversationAccepted
renderContact={() => '*ContactName*'}
renderAudioAttachment={() => <div>*AudioAttachment*</div>}
{...actions()}
/>
);

View file

@ -14,6 +14,8 @@ import {
import { ScrollDownButton } from './ScrollDownButton';
import { GlobalAudioProvider } from '../GlobalAudioContext';
import { LocalizerType } from '../../types/Util';
import { ConversationType } from '../../state/ducks/conversations';
@ -1087,6 +1089,44 @@ export class Timeline extends React.PureComponent<PropsType, StateType> {
return null;
}
const autoSizer = (
<AutoSizer>
{({ height, width }) => {
if (this.mostRecentWidth && this.mostRecentWidth !== width) {
this.resizeFlag = true;
setTimeout(this.resize, 0);
} else if (
this.mostRecentHeight &&
this.mostRecentHeight !== height
) {
setTimeout(this.onHeightOnlyChange, 0);
}
this.mostRecentWidth = width;
this.mostRecentHeight = height;
return (
<List
deferredMeasurementCache={this.cellSizeCache}
height={height}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
onScroll={this.onScroll as any}
overscanRowCount={10}
ref={this.listRef}
rowCount={rowCount}
rowHeight={this.cellSizeCache.rowHeight}
rowRenderer={this.rowRenderer}
scrollToAlignment="start"
scrollToIndex={scrollToIndex}
tabIndex={-1}
width={width}
/>
);
}}
</AutoSizer>
);
return (
<>
<div
@ -1099,41 +1139,9 @@ export class Timeline extends React.PureComponent<PropsType, StateType> {
onBlur={this.handleBlur}
onKeyDown={this.handleKeyDown}
>
<AutoSizer>
{({ height, width }) => {
if (this.mostRecentWidth && this.mostRecentWidth !== width) {
this.resizeFlag = true;
setTimeout(this.resize, 0);
} else if (
this.mostRecentHeight &&
this.mostRecentHeight !== height
) {
setTimeout(this.onHeightOnlyChange, 0);
}
this.mostRecentWidth = width;
this.mostRecentHeight = height;
return (
<List
deferredMeasurementCache={this.cellSizeCache}
height={height}
// eslint-disable-next-line @typescript-eslint/no-explicit-any
onScroll={this.onScroll as any}
overscanRowCount={10}
ref={this.listRef}
rowCount={rowCount}
rowHeight={this.cellSizeCache.rowHeight}
rowRenderer={this.rowRenderer}
scrollToAlignment="start"
scrollToIndex={scrollToIndex}
tabIndex={-1}
width={width}
/>
);
}}
</AutoSizer>
<GlobalAudioProvider conversationId={id}>
{autoSizer}
</GlobalAudioProvider>
{shouldShowScrollDownButton ? (
<ScrollDownButton
conversationId={id}

View file

@ -70,6 +70,7 @@ const getDefaultProps = () => ({
renderContact,
renderEmojiPicker,
renderAudioAttachment: () => <div>*AudioAttachment*</div>,
});
storiesOf('Components/Conversation/TimelineItem', module)

View file

@ -145,7 +145,7 @@ type PropsActionsType = MessageActionsType &
export type PropsType = PropsLocalType &
PropsActionsType &
Pick<AllMessageProps, 'renderEmojiPicker'>;
Pick<AllMessageProps, 'renderEmojiPicker' | 'renderAudioAttachment'>;
export class TimelineItem extends React.PureComponent<PropsType> {
public render(): JSX.Element | null {

View file

@ -834,7 +834,10 @@ export class MessageModel extends window.Backbone.Model<MessageAttributesType> {
}
// Note: interactionMode is mixed in via selectors/conversations._messageSelector
getPropsForMessage(): Omit<PropsData, 'interactionMode'> {
getPropsForMessage(): Omit<
PropsData,
'interactionMode' | 'renderAudioAttachment'
> {
const sourceId = this.getContactId();
const contact = this.findAndFormatContact(sourceId);
const contactModel = this.findContact(sourceId);

View file

@ -1,6 +1,7 @@
// Copyright 2019-2020 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { actions as audioPlayer } from './ducks/audioPlayer';
import { actions as calling } from './ducks/calling';
import { actions as conversations } from './ducks/conversations';
import { actions as emojis } from './ducks/emojis';
@ -14,6 +15,7 @@ import { actions as updates } from './ducks/updates';
import { actions as user } from './ducks/user';
export const mapDispatchToProps = {
...audioPlayer,
...calling,
...conversations,
...emojis,

View file

@ -0,0 +1,70 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { useBoundActions } from '../../util/hooks';
import { SwitchToAssociatedViewActionType } from './conversations';
// State

// Redux slice tracking which audio message currently "owns" the shared
// global <audio/> element provided by GlobalAudioContext.
export type AudioPlayerStateType = {
  // Message id whose audio attachment is currently active (playing or
  // paused), or `undefined` when no audio player is active.
  readonly activeAudioID: string | undefined;
};

// Actions

// Marks the message with `payload.id` as the active audio player;
// `id: undefined` clears the active player.
type SetActiveAudioIDAction = {
  type: 'audioPlayer/SET_ACTIVE_AUDIO_ID';
  payload: {
    id: string | undefined;
  };
};

// Union of all actions handled by this duck (currently just one).
type AudioPlayerActionType = SetActiveAudioIDAction;
// Action Creators
// All action creators exported by the audioPlayer duck; spread into
// mapDispatchToProps by the smart components.
export const actions = {
  setActiveAudioID,
};

// React hook returning the action creators pre-bound to the store dispatch.
export const useActions = (): typeof actions => useBoundActions(actions);
// Action creator: make the message with `id` the owner of the global
// <audio/> element. Pass `undefined` to release ownership entirely.
function setActiveAudioID(id: string | undefined): SetActiveAudioIDAction {
  const payload = { id };

  return { type: 'audioPlayer/SET_ACTIVE_AUDIO_ID', payload };
}
// Reducer

// Initial slice state: no audio message is active yet.
function getEmptyState(): AudioPlayerStateType {
  return { activeAudioID: undefined };
}
// Reducer for the audioPlayer slice.
//
// Handles SET_ACTIVE_AUDIO_ID from this duck, and additionally resets
// `activeAudioID` on the conversations duck's SWITCH_TO_ASSOCIATED_VIEW so
// playback state doesn't leak across conversation switches.
export function reducer(
  state: Readonly<AudioPlayerStateType> = getEmptyState(),
  action: Readonly<AudioPlayerActionType | SwitchToAssociatedViewActionType>
): AudioPlayerStateType {
  switch (action.type) {
    case 'audioPlayer/SET_ACTIVE_AUDIO_ID':
      return {
        ...state,
        activeAudioID: action.payload.id,
      };

    // Reset activeAudioID on conversation change.
    case 'SWITCH_TO_ASSOCIATED_VIEW':
      return {
        ...state,
        activeAudioID: undefined,
      };

    default:
      return state;
  }
}

View file

@ -3,6 +3,7 @@
import { combineReducers } from 'redux';
import { reducer as audioPlayer } from './ducks/audioPlayer';
import { reducer as calling } from './ducks/calling';
import { reducer as conversations } from './ducks/conversations';
import { reducer as emojis } from './ducks/emojis';
@ -16,6 +17,7 @@ import { reducer as updates } from './ducks/updates';
import { reducer as user } from './ducks/user';
export const reducer = combineReducers({
audioPlayer,
calling,
conversations,
emojis,

View file

@ -0,0 +1,36 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { connect } from 'react-redux';
import { MessageAudio } from '../../components/conversation/MessageAudio';
import { mapDispatchToProps } from '../actions';
import { StateType } from '../reducer';
import { WaveformCache } from '../../types/Audio';
import { LocalizerType } from '../../types/Util';
// Props accepted by SmartMessageAudio. The first three are injected from
// GlobalAudioContext (shared per-conversation), the rest come from the
// rendering Message.
export type Props = {
  // Shared <audio/> element that survives react-virtualized re-renders.
  audio: HTMLAudioElement;
  // Shared Web Audio context used to decode attachments for waveforms.
  audioContext: AudioContext;
  // LRU cache of computed waveform peaks/durations (see ts/types/Audio).
  waveformCache: WaveformCache;

  direction?: 'incoming' | 'outgoing';
  // Message id — presumably matched against activeAudioID; verify in MessageAudio.
  id: string;
  i18n: LocalizerType;
  // URL of the audio attachment to play.
  url: string;
  withContentAbove: boolean;
  withContentBelow: boolean;

  // Forwarded so the parent Message can propagate clicks to the play button.
  buttonRef: React.RefObject<HTMLButtonElement>;
};
// Merge the component's own props with the audioPlayer slice so
// MessageAudio can read `activeAudioID` alongside its regular props.
const mapStateToProps = (state: StateType, props: Props) => ({
  ...props,
  ...state.audioPlayer,
});
// `connect`ed MessageAudio: receives `activeAudioID` from the store plus all
// bound action creators (including setActiveAudioID) via mapDispatchToProps.
const smart = connect(mapStateToProps, mapDispatchToProps);
export const SmartMessageAudio = smart(MessageAudio);

View file

@ -5,6 +5,7 @@ import { pick } from 'lodash';
import React from 'react';
import { connect } from 'react-redux';
import { mapDispatchToProps } from '../actions';
import { GlobalAudioContext } from '../../components/GlobalAudioContext';
import { Timeline } from '../../components/conversation/Timeline';
import { RenderEmojiPickerProps } from '../../components/conversation/ReactionPicker';
import { StateType } from '../reducer';
@ -23,6 +24,7 @@ import { SmartLastSeenIndicator } from './LastSeenIndicator';
import { SmartHeroRow } from './HeroRow';
import { SmartTimelineLoadingRow } from './TimelineLoadingRow';
import { SmartEmojiPicker } from './EmojiPicker';
import { SmartMessageAudio, Props as MessageAudioProps } from './MessageAudio';
// Workaround: A react component's required properties are filtering up through connect()
// https://github.com/DefinitelyTyped/DefinitelyTyped/issues/31363
@ -41,6 +43,11 @@ type ExternalProps = {
// are provided by ConversationView in setupTimeline().
};
// Props callers must supply to renderAudioAttachment: everything
// MessageAudio needs except the shared audio element, AudioContext, and
// waveform cache, which are injected from GlobalAudioContext at render time.
type AudioAttachmentProps = Omit<
  MessageAudioProps,
  'audio' | 'audioContext' | 'waveformCache'
>;
function renderItem(
messageId: string,
conversationId: string,
@ -52,9 +59,25 @@ function renderItem(
conversationId={conversationId}
id={messageId}
renderEmojiPicker={renderEmojiPicker}
renderAudioAttachment={renderAudioAttachment}
/>
);
}
// Renders the audio player for a message attachment. The shared <audio/>
// element, AudioContext, and waveform cache live on GlobalAudioContext
// (provided once per conversation) so playback survives react-virtualized
// row re-renders; they are injected into SmartMessageAudio here.
function renderAudioAttachment(props: AudioAttachmentProps) {
  return (
    <GlobalAudioContext.Consumer>
      {globalAudioProps =>
        globalAudioProps && (
          <SmartMessageAudio {...props} {...globalAudioProps} />
        )
      }
    </GlobalAudioContext.Consumer>
  );
}
function renderEmojiPicker({
ref,
onPickEmoji,

View file

@ -1,6 +1,7 @@
// Copyright 2020 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { actions as audioPlayer } from './ducks/audioPlayer';
import { actions as calling } from './ducks/calling';
import { actions as conversations } from './ducks/conversations';
import { actions as emojis } from './ducks/emojis';
@ -14,6 +15,7 @@ import { actions as updates } from './ducks/updates';
import { actions as user } from './ducks/user';
export type ReduxActions = {
audioPlayer: typeof audioPlayer;
calling: typeof calling;
conversations: typeof conversations;
emojis: typeof emojis;

View file

@ -0,0 +1,25 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { assert } from 'chai';
import { actions } from '../../../state/ducks/audioPlayer';
import { noopAction } from '../../../state/ducks/noop';
import { StateType, reducer as rootReducer } from '../../../state/reducer';
// Fix: the suite name was copy-pasted from the search-selector tests
// ('both/state/selectors/search'); name it after what it actually tests.
// Also match the inner describe to the action's exact name
// (setActiveAudioID, capital D).
describe('both/state/ducks/audioPlayer', () => {
  // Build a pristine root state by running the root reducer with a no-op.
  const getEmptyRootState = (): StateType => {
    return rootReducer(undefined, noopAction());
  };

  describe('setActiveAudioID', () => {
    it("updates `activeAudioID` in the audioPlayer's state", () => {
      const state = getEmptyRootState();
      assert.strictEqual(state.audioPlayer.activeAudioID, undefined);

      const updated = rootReducer(state, actions.setActiveAudioID('test'));
      assert.strictEqual(updated.audioPlayer.activeAudioID, 'test');
    });
  });
});

12
ts/types/Audio.tsx Normal file
View file

@ -0,0 +1,12 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import LRU from 'lru-cache';
// LRU cache of decoded audio metadata used to draw message waveforms,
// so each attachment is decoded at most once per session.
// Keys are strings — presumably the attachment URL; confirm in MessageAudio.
export type WaveformCache = LRU<
  string,
  {
    // Total playback length — presumably seconds, as reported by decodeAudioData.
    duration: number;
    // Normalized per-bucket amplitude peaks rendered as the waveform bars.
    peaks: ReadonlyArray<number>;
  }
>;

View file

@ -14944,19 +14944,20 @@
{
"rule": "React-createRef",
"path": "ts/components/conversation/Message.js",
"line": " this.audioRef = react_1.default.createRef();",
"line": " this.focusRef = react_1.default.createRef();",
"lineNumber": 73,
"reasonCategory": "usageTrusted",
"updated": "2020-08-28T16:12:19.904Z"
"updated": "2021-03-05T20:05:07.474Z",
"reasonDetail": "Used for managing focus only"
},
{
"rule": "React-createRef",
"path": "ts/components/conversation/Message.js",
"line": " this.focusRef = react_1.default.createRef();",
"line": " this.audioButtonRef = react_1.default.createRef();",
"lineNumber": 74,
"reasonCategory": "usageTrusted",
"updated": "2020-09-11T17:24:56.124Z",
"reasonDetail": "Used for managing focus only"
"updated": "2021-03-05T20:05:07.474Z",
"reasonDetail": "Used for propagating click from the Message to MessageAudio's button"
},
{
"rule": "React-createRef",
@ -14970,26 +14971,38 @@
{
"rule": "React-createRef",
"path": "ts/components/conversation/Message.tsx",
"line": " public audioRef: React.RefObject<HTMLAudioElement> = React.createRef();",
"lineNumber": 222,
"line": " public focusRef: React.RefObject<HTMLDivElement> = React.createRef();",
"lineNumber": 235,
"reasonCategory": "usageTrusted",
"updated": "2020-09-08T20:19:01.913Z"
"updated": "2021-03-05T19:57:01.431Z",
"reasonDetail": "Used for managing focus only"
},
{
"rule": "React-createRef",
"path": "ts/components/conversation/Message.tsx",
"line": " public focusRef: React.RefObject<HTMLDivElement> = React.createRef();",
"lineNumber": 224,
"line": " public audioButtonRef: React.RefObject<HTMLButtonElement> = React.createRef();",
"lineNumber": 237,
"reasonCategory": "usageTrusted",
"updated": "2020-09-08T20:19:01.913Z"
"updated": "2021-03-05T19:57:01.431Z",
"reasonDetail": "Used for propagating click from the Message to MessageAudio's button"
},
{
"rule": "React-createRef",
"path": "ts/components/conversation/Message.tsx",
"line": " > = React.createRef();",
"lineNumber": 228,
"lineNumber": 241,
"reasonCategory": "usageTrusted",
"updated": "2020-08-28T19:36:40.817Z"
"updated": "2021-03-05T19:57:01.431Z",
"reasonDetail": "Used for detecting clicks outside reaction viewer"
},
{
"rule": "React-useRef",
"path": "ts/components/conversation/MessageAudio.js",
"line": " const waveformRef = react_1.useRef(null);",
"lineNumber": 116,
"reasonCategory": "usageTrusted",
"updated": "2021-03-09T01:19:04.057Z",
"reasonDetail": "Used for obtaining the bounding box for the container"
},
{
"rule": "React-createRef",
@ -15040,7 +15053,7 @@
"rule": "React-createRef",
"path": "ts/components/conversation/Timeline.js",
"line": " this.listRef = react_1.default.createRef();",
"lineNumber": 32,
"lineNumber": 33,
"reasonCategory": "usageTrusted",
"updated": "2019-07-31T00:19:18.696Z",
"reasonDetail": "Timeline needs to interact with its child List directly"

View file

@ -2318,6 +2318,11 @@
resolved "https://registry.yarnpkg.com/@types/long/-/long-4.0.1.tgz#459c65fa1867dafe6a8f322c4c51695663cc55e9"
integrity sha512-5tXH6Bx/kNGd3MgffdmP4dy2Z+G4eaXw0SE81Tq3BNadtnMR5/ySMzX4SLEzHJzSmPNn4HIdpQsBvXMUykr58w==
"@types/lru-cache@5.1.0":
version "5.1.0"
resolved "https://registry.yarnpkg.com/@types/lru-cache/-/lru-cache-5.1.0.tgz#57f228f2b80c046b4a1bd5cac031f81f207f4f03"
integrity sha512-RaE0B+14ToE4l6UqdarKPnXwVDuigfFv+5j9Dze/Nqr23yyuqdNvzcZi3xB+3Agvi5R4EOgAksfv3lXX4vBt9w==
"@types/memoizee@0.4.2":
version "0.4.2"
resolved "https://registry.yarnpkg.com/@types/memoizee/-/memoizee-0.4.2.tgz#a500158999a8144a9b46cf9a9fb49b15f1853573"
@ -10525,6 +10530,13 @@ lowlight@~1.9.1:
fault "^1.0.2"
highlight.js "~9.12.0"
lru-cache@6.0.0, lru-cache@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94"
integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==
dependencies:
yallist "^4.0.0"
lru-cache@^4.0.0, lru-cache@^4.0.1:
version "4.1.5"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-4.1.5.tgz#8bbe50ea85bed59bc9e33dcab8235ee9bcf443cd"
@ -10540,13 +10552,6 @@ lru-cache@^5.1.1:
dependencies:
yallist "^3.0.2"
lru-cache@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/lru-cache/-/lru-cache-6.0.0.tgz#6d6fe6570ebd96aaf90fcad1dafa3b2566db3a94"
integrity sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==
dependencies:
yallist "^4.0.0"
lru-queue@0.1:
version "0.1.0"
resolved "https://registry.yarnpkg.com/lru-queue/-/lru-queue-0.1.0.tgz#2738bd9f0d3cf4f84490c5736c48699ac632cda3"