Voice notes drafts

This commit is contained in:
Alvaro 2023-03-02 13:55:40 -07:00 committed by GitHub
parent 356fb301e1
commit 99015d7b96
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
48 changed files with 2113 additions and 909 deletions

View file

@ -1,85 +0,0 @@
// Copyright 2020 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import * as React from 'react';
import { action } from '@storybook/addon-actions';
import { select } from '@storybook/addon-knobs';
import {
ErrorDialogAudioRecorderType,
RecordingState,
} from '../../types/AudioRecorder';
import type { PropsType } from './AudioCapture';
import { AudioCapture } from './AudioCapture';
import { setupI18n } from '../../util/setupI18n';
import enMessages from '../../../_locales/en/messages.json';
const i18n = setupI18n('en', enMessages);
export default {
title: 'Components/Conversation/AudioCapture',
};
// Builds a full PropsType for the stories; each story overrides only the
// fields it cares about.
const createProps = (overrideProps: Partial<PropsType> = {}): PropsType => {
  const recordingState = select(
    'recordingState',
    RecordingState,
    overrideProps.recordingState || RecordingState.Idle
  );

  return {
    cancelRecording: action('cancelRecording'),
    completeRecording: action('completeRecording'),
    conversationId: '123',
    draftAttachments: [],
    errorDialogAudioRecorderType: overrideProps.errorDialogAudioRecorderType,
    errorRecording: action('errorRecording'),
    i18n,
    recordingState,
    onSendAudioRecording: action('onSendAudioRecording'),
    startRecording: action('startRecording'),
  };
};
// Idle capture button with no overrides applied.
export function Default(): JSX.Element {
  const props = createProps();
  return <AudioCapture {...props} />;
}
// Recorder is starting up but not yet capturing audio.
export const _Initializing = (): JSX.Element => (
  <AudioCapture
    {...createProps({ recordingState: RecordingState.Initializing })}
  />
);
// Recorder actively capturing audio.
export const _Recording = (): JSX.Element => (
  <AudioCapture
    {...createProps({ recordingState: RecordingState.Recording })}
  />
);
// Recording hit the time limit: Timeout error dialog is shown.
export function VoiceLimit(): JSX.Element {
  const props = createProps({
    errorDialogAudioRecorderType: ErrorDialogAudioRecorderType.Timeout,
    recordingState: RecordingState.Recording,
  });
  return <AudioCapture {...props} />;
}
// Window lost focus mid-recording: Blur error dialog is shown.
export function SwitchedApps(): JSX.Element {
  const props = createProps({
    errorDialogAudioRecorderType: ErrorDialogAudioRecorderType.Blur,
    recordingState: RecordingState.Recording,
  });
  return <AudioCapture {...props} />;
}

View file

@ -1,100 +1,30 @@
// Copyright 2016 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import React, { useCallback, useEffect, useState } from 'react';
import * as moment from 'moment';
import { noop } from 'lodash';
import React, { useCallback, useState } from 'react';
import type {
AttachmentDraftType,
InMemoryAttachmentDraftType,
} from '../../types/Attachment';
import { ConfirmationDialog } from '../ConfirmationDialog';
import type { AttachmentDraftType } from '../../types/Attachment';
import type { LocalizerType } from '../../types/Util';
import { ToastVoiceNoteLimit } from '../ToastVoiceNoteLimit';
import { ToastVoiceNoteMustBeOnlyAttachment } from '../ToastVoiceNoteMustBeOnlyAttachment';
import { useEscapeHandling } from '../../hooks/useEscapeHandling';
import {
useStartRecordingShortcut,
useKeyboardShortcuts,
} from '../../hooks/useKeyboardShortcuts';
import {
ErrorDialogAudioRecorderType,
RecordingState,
} from '../../types/AudioRecorder';
type OnSendAudioRecordingType = (rec: InMemoryAttachmentDraftType) => unknown;
export type PropsType = {
cancelRecording: () => unknown;
conversationId: string;
completeRecording: (
conversationId: string,
onSendAudioRecording?: OnSendAudioRecordingType
) => unknown;
draftAttachments: ReadonlyArray<AttachmentDraftType>;
errorDialogAudioRecorderType?: ErrorDialogAudioRecorderType;
errorRecording: (e: ErrorDialogAudioRecorderType) => unknown;
i18n: LocalizerType;
recordingState: RecordingState;
onSendAudioRecording: OnSendAudioRecordingType;
startRecording: (id: string) => unknown;
};
enum ToastType {
VoiceNoteLimit,
VoiceNoteMustBeOnlyAttachment,
}
const START_DURATION_TEXT = '0:00';
export function AudioCapture({
cancelRecording,
completeRecording,
conversationId,
draftAttachments,
errorDialogAudioRecorderType,
errorRecording,
i18n,
recordingState,
onSendAudioRecording,
startRecording,
}: PropsType): JSX.Element {
const [durationText, setDurationText] = useState<string>(START_DURATION_TEXT);
const [toastType, setToastType] = useState<ToastType | undefined>();
// Cancel recording if we switch away from this conversation, unmounting
useEffect(() => {
return () => {
cancelRecording();
};
}, [cancelRecording]);
// Stop recording and show confirmation if user switches away from this app
useEffect(() => {
if (recordingState !== RecordingState.Recording) {
return;
}
const handler = () => {
errorRecording(ErrorDialogAudioRecorderType.Blur);
};
window.addEventListener('blur', handler);
return () => {
window.removeEventListener('blur', handler);
};
}, [recordingState, completeRecording, errorRecording]);
const escapeRecording = useCallback(() => {
if (recordingState !== RecordingState.Recording) {
return;
}
cancelRecording();
}, [cancelRecording, recordingState]);
useEscapeHandling(escapeRecording);
const [showOnlyAttachmentToast, setShowOnlyAttachmentToast] = useState(false);
const recordConversation = useCallback(
() => startRecording(conversationId),
@ -103,156 +33,40 @@ export function AudioCapture({
const startRecordingShortcut = useStartRecordingShortcut(recordConversation);
useKeyboardShortcuts(startRecordingShortcut);
const closeToast = useCallback(() => {
setToastType(undefined);
const handleCloseToast = useCallback(() => {
setShowOnlyAttachmentToast(false);
}, []);
// Update timestamp regularly, then timeout if recording goes over five minutes
useEffect(() => {
if (recordingState !== RecordingState.Recording) {
return;
const handleClick = useCallback(() => {
if (draftAttachments.length) {
setShowOnlyAttachmentToast(true);
} else {
startRecording(conversationId);
}
setDurationText(START_DURATION_TEXT);
setToastType(ToastType.VoiceNoteLimit);
const startTime = Date.now();
const interval = setInterval(() => {
const duration = moment.duration(Date.now() - startTime, 'ms');
const minutes = `${Math.trunc(duration.asMinutes())}`;
let seconds = `${duration.seconds()}`;
if (seconds.length < 2) {
seconds = `0${seconds}`;
}
setDurationText(`${minutes}:${seconds}`);
if (duration >= moment.duration(1, 'hours')) {
errorRecording(ErrorDialogAudioRecorderType.Timeout);
}
}, 1000);
return () => {
clearInterval(interval);
closeToast();
};
}, [
closeToast,
completeRecording,
errorRecording,
recordingState,
setDurationText,
conversationId,
draftAttachments,
setShowOnlyAttachmentToast,
startRecording,
]);
const clickCancel = useCallback(() => {
cancelRecording();
}, [cancelRecording]);
const clickSend = useCallback(() => {
completeRecording(conversationId, onSendAudioRecording);
}, [conversationId, completeRecording, onSendAudioRecording]);
let toastElement: JSX.Element | undefined;
if (toastType === ToastType.VoiceNoteLimit) {
toastElement = <ToastVoiceNoteLimit i18n={i18n} onClose={closeToast} />;
} else if (toastType === ToastType.VoiceNoteMustBeOnlyAttachment) {
toastElement = (
<ToastVoiceNoteMustBeOnlyAttachment i18n={i18n} onClose={closeToast} />
);
}
let confirmationDialog: JSX.Element | undefined;
if (
errorDialogAudioRecorderType === ErrorDialogAudioRecorderType.Blur ||
errorDialogAudioRecorderType === ErrorDialogAudioRecorderType.Timeout
) {
const confirmationDialogText =
errorDialogAudioRecorderType === ErrorDialogAudioRecorderType.Blur
? i18n('voiceRecordingInterruptedBlur')
: i18n('voiceRecordingInterruptedMax');
confirmationDialog = (
<ConfirmationDialog
dialogName="AudioCapture.sendAnyway"
i18n={i18n}
onCancel={clickCancel}
onClose={noop}
cancelText={i18n('discard')}
actions={[
{
text: i18n('sendAnyway'),
style: 'affirmative',
action: clickSend,
},
]}
>
{confirmationDialogText}
</ConfirmationDialog>
);
} else if (
errorDialogAudioRecorderType === ErrorDialogAudioRecorderType.ErrorRecording
) {
confirmationDialog = (
<ConfirmationDialog
dialogName="AudioCapture.error"
i18n={i18n}
onCancel={clickCancel}
onClose={noop}
cancelText={i18n('ok')}
actions={[]}
>
{i18n('voiceNoteError')}
</ConfirmationDialog>
);
}
if (recordingState === RecordingState.Recording && !confirmationDialog) {
return (
<>
<div className="AudioCapture">
<button
className="AudioCapture__recorder-button AudioCapture__recorder-button--complete"
onClick={clickSend}
tabIndex={0}
title={i18n('voiceRecording--complete')}
type="button"
>
<span className="icon" />
</button>
<span className="AudioCapture__time">{durationText}</span>
<button
className="AudioCapture__recorder-button AudioCapture__recorder-button--cancel"
onClick={clickCancel}
tabIndex={0}
title={i18n('voiceRecording--cancel')}
type="button"
>
<span className="icon" />
</button>
</div>
{toastElement}
</>
);
}
return (
<>
<div className="AudioCapture">
<button
aria-label={i18n('voiceRecording--start')}
className="AudioCapture__microphone"
onClick={() => {
if (draftAttachments.length) {
setToastType(ToastType.VoiceNoteMustBeOnlyAttachment);
} else {
startRecording(conversationId);
}
}}
onClick={handleClick}
title={i18n('voiceRecording--start')}
type="button"
/>
{confirmationDialog}
</div>
{toastElement}
{showOnlyAttachmentToast && (
<ToastVoiceNoteMustBeOnlyAttachment
i18n={i18n}
onClose={handleCloseToast}
/>
)}
</>
);
}

View file

@ -1,7 +1,7 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import React, { useCallback, useRef, useEffect, useState } from 'react';
import React, { useCallback } from 'react';
import type { RefObject } from 'react';
import classNames from 'classnames';
import { noop } from 'lodash';
@ -18,6 +18,9 @@ import { MessageMetadata } from './MessageMetadata';
import * as log from '../../logging/log';
import type { ActiveAudioPlayerStateType } from '../../state/ducks/audioPlayer';
import { PlaybackRateButton } from '../PlaybackRateButton';
import { PlaybackButton } from '../PlaybackButton';
import { WaveformScrubber } from './WaveformScrubber';
import { useComputePeaks } from '../../hooks/useComputePeaks';
import { durationToPlaybackText } from '../../util/durationToPlaybackText';
export type OwnProps = Readonly<{
@ -58,15 +61,6 @@ export type DispatchProps = Readonly<{
export type Props = OwnProps & DispatchProps;
type ButtonProps = {
mod?: string;
label: string;
visible?: boolean;
onClick: () => void;
onMouseDown?: () => void;
onMouseUp?: () => void;
};
enum State {
NotDownloaded = 'NotDownloaded',
Pending = 'Pending',
@ -82,12 +76,6 @@ const BAR_NOT_DOWNLOADED_HEIGHT = 2;
const BAR_MIN_HEIGHT = 4;
const BAR_MAX_HEIGHT = 20;
const REWIND_BAR_COUNT = 2;
// Increments for keyboard audio seek (in seconds)
const SMALL_INCREMENT = 1;
const BIG_INCREMENT = 5;
const SPRING_CONFIG = {
mass: 0.5,
tension: 350,
@ -97,62 +85,6 @@ const SPRING_CONFIG = {
const DOT_DIV_WIDTH = 14;
/** Handles animations, key events, and stopping event propagation */
const PlaybackButton = React.forwardRef<HTMLButtonElement, ButtonProps>(
function ButtonInner(props, ref) {
const { mod, label, onClick, visible = true } = props;
const [animProps] = useSpring(
{
config: SPRING_CONFIG,
to: { scale: visible ? 1 : 0 },
},
[visible]
);
// Clicking button toggle playback
const onButtonClick = useCallback(
(event: React.MouseEvent) => {
event.stopPropagation();
event.preventDefault();
onClick();
},
[onClick]
);
// Keyboard playback toggle
const onButtonKeyDown = useCallback(
(event: React.KeyboardEvent) => {
if (event.key !== 'Enter' && event.key !== 'Space') {
return;
}
event.stopPropagation();
event.preventDefault();
onClick();
},
[onClick]
);
return (
<animated.div style={animProps}>
<button
type="button"
ref={ref}
className={classNames(
`${CSS_BASE}__play-button`,
mod ? `${CSS_BASE}__play-button--${mod}` : undefined
)}
onClick={onButtonClick}
onKeyDown={onButtonKeyDown}
tabIndex={0}
aria-label={label}
/>
</animated.div>
);
}
);
function PlayedDot({
played,
onHide,
@ -222,7 +154,6 @@ export function MessageAudio(props: Props): JSX.Element {
kickOffAttachmentDownload,
onCorrupted,
computePeaks,
setPlaybackRate,
onPlayMessage,
pushPanelForConversation,
@ -230,21 +161,18 @@ export function MessageAudio(props: Props): JSX.Element {
setIsPlaying,
} = props;
const waveformRef = useRef<HTMLDivElement | null>(null);
const isPlaying = active?.playing ?? false;
const [isPlayedDotVisible, setIsPlayedDotVisible] = React.useState(!played);
// if it's playing, use the duration passed as props as it might
// change during loading/playback (?)
// NOTE: Avoid division by zero
const [duration, setDuration] = useState(active?.duration ?? 1e-23);
const audioUrl = isDownloaded(attachment) ? attachment.url : undefined;
const [hasPeaks, setHasPeaks] = useState(false);
const [peaks, setPeaks] = useState<ReadonlyArray<number>>(
new Array(BAR_COUNT).fill(0)
);
const { duration, hasPeaks, peaks } = useComputePeaks({
audioUrl,
activeDuration: active?.duration,
barCount: BAR_COUNT,
onCorrupted,
});
let state: State;
@ -258,60 +186,7 @@ export function MessageAudio(props: Props): JSX.Element {
state = State.Normal;
}
// This effect loads audio file and computes its RMS peak for displaying the
// waveform.
useEffect(() => {
if (state !== State.Computing) {
return noop;
}
log.info('MessageAudio: loading audio and computing waveform');
let canceled = false;
void (async () => {
try {
if (!attachment.url) {
throw new Error(
'Expected attachment url in the MessageAudio with ' +
`state: ${state}`
);
}
const { peaks: newPeaks, duration: newDuration } = await computePeaks(
attachment.url,
BAR_COUNT
);
if (canceled) {
return;
}
setPeaks(newPeaks);
setHasPeaks(true);
setDuration(Math.max(newDuration, 1e-23));
} catch (err) {
log.error(
'MessageAudio: computePeaks error, marking as corrupted',
err
);
onCorrupted();
}
})();
return () => {
canceled = true;
};
}, [
attachment,
computePeaks,
setDuration,
setPeaks,
setHasPeaks,
onCorrupted,
state,
]);
const toggleIsPlaying = () => {
const toggleIsPlaying = useCallback(() => {
if (!isPlaying) {
if (!attachment.url) {
throw new Error(
@ -328,144 +203,96 @@ export function MessageAudio(props: Props): JSX.Element {
} else {
setIsPlaying(false);
}
};
// Clicking waveform moves playback head position and starts playback.
const onWaveformClick = (event: React.MouseEvent) => {
event.preventDefault();
event.stopPropagation();
if (state !== State.Normal) {
return;
}
if (!waveformRef.current) {
return;
}
const boundingRect = waveformRef.current.getBoundingClientRect();
let progress = (event.pageX - boundingRect.left) / boundingRect.width;
if (progress <= REWIND_BAR_COUNT / BAR_COUNT) {
progress = 0;
}
if (active) {
setPosition(progress);
if (!active.playing) {
setIsPlaying(true);
}
return;
}
if (attachment.url) {
onPlayMessage(id, progress);
} else {
log.warn('Waveform clicked on attachment with no url');
}
};
// Keyboard navigation for waveform. Pressing keys moves playback head
// forward/backwards.
const onWaveformKeyDown = (event: React.KeyboardEvent) => {
let increment: number;
if (event.key === 'ArrowRight' || event.key === 'ArrowUp') {
increment = +SMALL_INCREMENT;
} else if (event.key === 'ArrowLeft' || event.key === 'ArrowDown') {
increment = -SMALL_INCREMENT;
} else if (event.key === 'PageUp') {
increment = +BIG_INCREMENT;
} else if (event.key === 'PageDown') {
increment = -BIG_INCREMENT;
} else {
// We don't handle other keys
return;
}
event.preventDefault();
event.stopPropagation();
// There is no audio to rewind
if (!active) {
return;
}
const currentPosition = active.currentTime / duration;
const positionIncrement = increment / duration;
setPosition(currentPosition + positionIncrement);
if (!isPlaying) {
toggleIsPlaying();
}
};
}, [
isPlaying,
attachment.url,
active,
state,
setIsPlaying,
id,
onPlayMessage,
]);
const currentTimeOrZero = active?.currentTime ?? 0;
const peakPosition = peaks.length * (currentTimeOrZero / duration);
const updatePosition = useCallback(
(newPosition: number) => {
if (active) {
setPosition(newPosition);
if (!active.playing) {
setIsPlaying(true);
}
return;
}
if (attachment.url) {
onPlayMessage(id, newPosition);
} else {
log.warn('Waveform clicked on attachment with no url');
}
},
[active, attachment.url, id, onPlayMessage, setIsPlaying, setPosition]
);
const handleWaveformClick = useCallback(
(positionAsRatio: number) => {
if (state !== State.Normal) {
return;
}
updatePosition(positionAsRatio);
},
[state, updatePosition]
);
const handleWaveformScrub = useCallback(
(amountInSeconds: number) => {
const currentPosition = currentTimeOrZero / duration;
const positionIncrement = amountInSeconds / duration;
updatePosition(
Math.min(Math.max(0, currentPosition + positionIncrement), duration)
);
},
[currentTimeOrZero, duration, updatePosition]
);
const waveform = (
<div
ref={waveformRef}
className={`${CSS_BASE}__waveform`}
onClick={onWaveformClick}
onKeyDown={onWaveformKeyDown}
tabIndex={0}
role="slider"
aria-label={i18n('MessageAudio--slider')}
aria-orientation="horizontal"
aria-valuenow={currentTimeOrZero}
aria-valuemin={0}
aria-valuemax={duration}
aria-valuetext={durationToPlaybackText(currentTimeOrZero)}
>
{peaks.map((peak, i) => {
let height = Math.max(BAR_MIN_HEIGHT, BAR_MAX_HEIGHT * peak);
if (state !== State.Normal) {
height = BAR_NOT_DOWNLOADED_HEIGHT;
}
const highlight = i < peakPosition;
// Use maximum height for current audio position
if (highlight && i + 1 >= peakPosition) {
height = BAR_MAX_HEIGHT;
}
const key = i;
return (
<div
className={classNames([
`${CSS_BASE}__waveform__bar`,
highlight ? `${CSS_BASE}__waveform__bar--active` : null,
])}
key={key}
style={{ height }}
/>
);
})}
</div>
<WaveformScrubber
i18n={i18n}
peaks={peaks}
duration={duration}
currentTime={currentTimeOrZero}
barMinHeight={
state !== State.Normal ? BAR_NOT_DOWNLOADED_HEIGHT : BAR_MIN_HEIGHT
}
barMaxHeight={BAR_MAX_HEIGHT}
onClick={handleWaveformClick}
onScrub={handleWaveformScrub}
/>
);
let button: React.ReactElement;
if (state === State.Pending || state === State.Computing) {
// Not really a button, but who cares?
button = (
<div
className={classNames(
`${CSS_BASE}__spinner`,
`${CSS_BASE}__spinner--pending`
)}
title={i18n('MessageAudio--pending')}
<PlaybackButton
variant="message"
mod="pending"
onClick={noop}
label={i18n('MessageAudio--pending')}
context={direction}
/>
);
} else if (state === State.NotDownloaded) {
button = (
<PlaybackButton
ref={buttonRef}
variant="message"
mod="download"
label="MessageAudio--download"
label={i18n('MessageAudio--download')}
onClick={kickOffAttachmentDownload}
context={direction}
/>
);
} else {
@ -473,11 +300,13 @@ export function MessageAudio(props: Props): JSX.Element {
button = (
<PlaybackButton
ref={buttonRef}
variant="message"
mod={isPlaying ? 'pause' : 'play'}
label={
isPlaying ? i18n('MessageAudio--pause') : i18n('MessageAudio--play')
}
onClick={toggleIsPlaying}
context={direction}
/>
);
}

View file

@ -2,7 +2,7 @@
// SPDX-License-Identifier: AGPL-3.0-only
import * as React from 'react';
import { isBoolean } from 'lodash';
import { isBoolean, noop } from 'lodash';
import { action } from '@storybook/addon-actions';
import { boolean, number, select, text } from '@storybook/addon-knobs';
@ -134,12 +134,14 @@ function MessageAudioContainer({
const [isActive, setIsActive] = React.useState<boolean>(false);
const [currentTime, setCurrentTime] = React.useState<number>(0);
const [playbackRate, setPlaybackRate] = React.useState<number>(1);
const [playing, setPlaying] = React.useState<boolean>(false);
const [isPlaying, setIsPlaying] = React.useState<boolean>(false);
const [_played, setPlayed] = React.useState<boolean>(played);
const audio = React.useMemo(() => {
const audioPlayer = React.useMemo(() => {
const a = new Audio();
let onLoadedData: () => void = noop;
a.addEventListener('timeupdate', () => {
setCurrentTime(a.currentTime);
});
@ -148,54 +150,76 @@ function MessageAudioContainer({
setIsActive(false);
});
a.addEventListener('loadeddata', () => {
a.currentTime = currentTime;
});
a.addEventListener('loadeddata', () => onLoadedData());
return a;
// eslint-disable-next-line react-hooks/exhaustive-deps
function play(positionAsRatio?: number) {
if (positionAsRatio !== undefined) {
a.currentTime = positionAsRatio * a.duration;
}
void a.play();
}
return {
loadAndPlay(url: string, positionAsRatio: number) {
onLoadedData = () => {
play(positionAsRatio);
};
a.src = url;
},
play,
pause() {
a.pause();
},
set playbackRate(rate: number) {
a.playbackRate = rate;
},
set currentTime(value: number) {
a.currentTime = value;
},
get duration() {
return a.duration;
},
};
}, []);
const handlePlayMessage = (id: string, position: number) => {
const handlePlayMessage = (id: string, positionAsRatio: number) => {
if (!active) {
audio.src = messageIdToAudioUrl[id as keyof typeof messageIdToAudioUrl];
audioPlayer.loadAndPlay(
messageIdToAudioUrl[id as keyof typeof messageIdToAudioUrl],
positionAsRatio
);
setIsActive(true);
}
if (!playing) {
void audio.play();
setPlaying(true);
setIsPlaying(true);
setPlayed(true);
}
if (!Number.isNaN(audio.duration)) {
audio.currentTime = audio.duration * position;
}
if (!Number.isNaN(audio.currentTime)) {
setCurrentTime(audio.currentTime);
}
};
const setPlaybackRateAction = (rate: number) => {
audio.playbackRate = rate;
audioPlayer.playbackRate = rate;
setPlaybackRate(rate);
};
const setIsPlayingAction = (value: boolean) => {
if (value) {
void audio.play();
audioPlayer.play();
} else {
audio.pause();
audioPlayer.pause();
}
setPlaying(value);
setIsPlaying(value);
};
const setPosition = (value: number) => {
audio.currentTime = value * audio.duration;
setCurrentTime(audio.currentTime);
audioPlayer.currentTime = value * audioPlayer.duration;
setCurrentTime(audioPlayer.currentTime);
};
const active = isActive
? { playing, playbackRate, currentTime, duration: audio.duration }
? {
playing: isPlaying,
playbackRate,
currentTime,
duration: audioPlayer.duration,
}
: undefined;
return (

View file

@ -0,0 +1,60 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import classNames from 'classnames';
import React from 'react';
import { assertDev } from '../../util/assert';
type Props = {
peaks: ReadonlyArray<number>;
barMinHeight: number;
barMaxHeight: number;
currentTime: number | undefined;
duration: number | undefined;
};
export function Waveform({
peaks,
barMinHeight,
barMaxHeight,
currentTime,
duration,
}: Props): JSX.Element {
const currentTimeOrZero = currentTime ?? 0;
const peakPosition = peaks.length * (currentTimeOrZero / (duration ?? 1e-23));
return (
<div className={classNames(['Waveform'])}>
{peaks.map((peak, i) => {
assertDev(
peak >= 0 && peak <= 1 && !Number.isNaN(peak),
`Peak outside of range: ${peak}`
);
let height = Math.max(barMinHeight, barMaxHeight * peak);
const highlight = i < peakPosition;
// Use maximum height for current audio position
if (highlight && i + 1 >= peakPosition) {
height = barMaxHeight;
}
assertDev(!Number.isNaN(height), 'Got NaN for peak height');
const key = i;
return (
<div
className={classNames([
'Waveform__bar',
highlight ? 'Waveform__bar--active' : null,
])}
key={key}
style={{ height }}
/>
);
})}
</div>
);
}

View file

@ -0,0 +1,123 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import React, { useCallback, useRef } from 'react';
import { useRefMerger } from '../../hooks/useRefMerger';
import type { LocalizerType } from '../../types/Util';
import { durationToPlaybackText } from '../../util/durationToPlaybackText';
import { Waveform } from './Waveform';
type Props = Readonly<{
  i18n: LocalizerType;
  // RMS peaks to render; Waveform asserts each is in [0, 1].
  peaks: ReadonlyArray<number>;
  // Current playback position, in seconds.
  currentTime: number;
  // Total audio length in seconds; undefined while still loading.
  duration: number | undefined;
  barMinHeight: number;
  barMaxHeight: number;
  // Receives the clicked position as a ratio of total width, in [0, 1].
  onClick: (positionAsRatio: number) => void;
  // Receives the new playback position as a ratio after a keyboard seek.
  onScrub: (positionAsRatio: number) => void;
}>;

const BAR_COUNT = 47;
// Clicks landing within this many leading bars snap back to position 0.
const REWIND_BAR_COUNT = 2;

// Increments for keyboard audio seek (in seconds)
const SMALL_INCREMENT = 1;
const BIG_INCREMENT = 5;
/**
 * Interactive waveform exposed as an ARIA slider for audio playback.
 *
 * - Mouse: a click reports its horizontal position as a ratio in [0, 1] via
 *   `onClick`; clicks within the first REWIND_BAR_COUNT bars snap back to 0.
 * - Keyboard: Arrow keys seek by SMALL_INCREMENT seconds and
 *   PageUp/PageDown by BIG_INCREMENT seconds; the resulting position ratio
 *   is reported via `onScrub`.
 *
 * NOTE(review): per Props, `onScrub` receives a position *ratio* — confirm
 * every caller treats the argument that way rather than as seconds.
 */
export const WaveformScrubber = React.forwardRef<HTMLDivElement, Props>(
  function WaveformScrubber(
    {
      i18n,
      peaks,
      barMinHeight,
      barMaxHeight,
      currentTime,
      duration,
      onClick,
      onScrub,
    }: Props,
    ref
  ): JSX.Element {
    const refMerger = useRefMerger();
    const waveformRef = useRef<HTMLDivElement | null>(null);

    // Clicking waveform moves playback head position and starts playback.
    const handleClick = useCallback(
      (event: React.MouseEvent) => {
        event.preventDefault();
        event.stopPropagation();

        if (!waveformRef.current) {
          return;
        }

        const boundingRect = waveformRef.current.getBoundingClientRect();
        let progress = (event.pageX - boundingRect.left) / boundingRect.width;

        // Snap to the very start when clicking near the left edge.
        if (progress <= REWIND_BAR_COUNT / BAR_COUNT) {
          progress = 0;
        }

        onClick(progress);
      },
      // Refs are stable across renders, so waveformRef isn't a dependency.
      [onClick]
    );

    // Keyboard navigation for waveform. Pressing keys moves playback head
    // forward/backwards. Memoized for consistency with handleClick.
    const handleKeyDown = useCallback(
      (event: React.KeyboardEvent) => {
        // Can't seek until the duration is known.
        if (!duration) {
          return;
        }

        let increment: number;
        if (event.key === 'ArrowRight' || event.key === 'ArrowUp') {
          increment = +SMALL_INCREMENT;
        } else if (event.key === 'ArrowLeft' || event.key === 'ArrowDown') {
          increment = -SMALL_INCREMENT;
        } else if (event.key === 'PageUp') {
          increment = +BIG_INCREMENT;
        } else if (event.key === 'PageDown') {
          increment = -BIG_INCREMENT;
        } else {
          // We don't handle other keys
          return;
        }

        event.preventDefault();
        event.stopPropagation();

        // Convert the seconds-based increment into a position ratio.
        const currentPosition = currentTime / duration;
        const positionIncrement = increment / duration;
        const newPosition = currentPosition + positionIncrement;

        onScrub(newPosition);
      },
      [currentTime, duration, onScrub]
    );

    return (
      <div
        ref={refMerger(waveformRef, ref)}
        className="WaveformScrubber"
        onClick={handleClick}
        onKeyDown={handleKeyDown}
        tabIndex={0}
        role="slider"
        aria-label={i18n('MessageAudio--slider')}
        aria-orientation="horizontal"
        aria-valuenow={currentTime}
        aria-valuemin={0}
        aria-valuemax={duration}
        aria-valuetext={durationToPlaybackText(currentTime)}
      >
        <Waveform
          peaks={peaks}
          barMinHeight={barMinHeight}
          barMaxHeight={barMaxHeight}
          currentTime={currentTime}
          duration={duration}
        />
      </div>
    );
  }
);