Moves AudioCapture into React

This commit is contained in:
Josh Perez 2021-09-29 16:23:06 -04:00 committed by GitHub
parent c170d04ffa
commit 603c315c82
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
23 changed files with 1012 additions and 492 deletions

View file

@ -909,6 +909,18 @@
"message": "Original message found, but not loaded. Scroll up to load it.",
"description": "Shown in toast if user clicks on quote references messages not loaded in view, but in database"
},
"voiceRecording--start": {
"message": "Start recording voice message",
"description": "Tooltip for microphone button to start voice message"
},
"voiceRecording--complete": {
"message": "Complete voice message and send",
"description": "Tooltip for the green button that completes the voice message and sends it"
},
"voiceRecording--cancel": {
"message": "Cancel voice message",
"description": "Tooltip for red button to cancel voice message"
},
"voiceRecordingInterruptedMax": {
"message": "Voice message recording stopped because the maximum time limit was reached.",
"description": "Confirmation dialog message for when the voice recording is interrupted due to max time limit"

View file

@ -466,7 +466,6 @@ try {
require('./ts/views/conversation_view');
require('./ts/views/inbox_view');
require('./ts/views/install_view');
require('./ts/views/recorder_view');
require('./ts/views/standalone_registration_view');
require('./ts/SignalProtocolStore');
require('./ts/background');

View file

@ -0,0 +1,128 @@
// Copyright 2016-2020 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
// Container for the voice-note capture UI: the idle microphone button, and
// while recording, the green "complete/send" button, elapsed-time readout,
// and red "cancel" button.
.AudioCapture {
  display: flex;
  text-align: center;
  flex-direction: row;
  justify-content: center;
  align-items: center;
  background: none;

  // Idle microphone button; dimmed until hovered/focused.
  &__microphone {
    height: 32px;
    width: 32px;
    text-align: center;
    opacity: 0.5;
    background: none;
    padding: 0;
    border: none;

    &:focus,
    &:hover {
      opacity: 1;
    }

    outline: none;

    // The mic glyph itself, drawn as a masked SVG so it can be recolored
    // per theme.
    &:before {
      content: '';
      display: inline-block;
      height: 24px;
      width: 24px;

      @include light-theme {
        @include color-svg(
          '../images/icons/v2/mic-outline-24.svg',
          $color-gray-75
        );
      }
      @include dark-theme {
        @include color-svg(
          '../images/icons/v2/mic-solid-24.svg',
          $color-gray-15
        );
      }
    }
  }

  // Shared styling for the round complete/cancel buttons shown while
  // recording; the --complete / --cancel modifiers pick color and glyph.
  &__recorder-button {
    flex-grow: 0;
    flex-shrink: 0;
    width: 32px;
    height: 32px;
    border-radius: 32px;
    margin-left: 5px;
    opacity: 0.3;
    text-align: center;
    padding: 0;

    &:focus,
    &:hover {
      opacity: 1;
    }

    outline: none;

    .icon {
      display: inline-block;
      width: 24px;
      height: 24px;
      margin-bottom: -3px;
    }

    // Green check: finish the recording and send it.
    &--complete {
      background: lighten($color-accent-green, 20%);
      border: 1px solid $color-accent-green;

      .icon {
        @include color-svg(
          '../images/icons/v2/check-24.svg',
          $color-accent-green
        );
      }
    }

    // Red X: discard the recording.
    &--cancel {
      background: lighten($color-accent-red, 20%);
      border: 1px solid $color-accent-red;

      .icon {
        @include color-svg('../images/icons/v2/x-24.svg', $color-accent-red);
      }
    }
  }

  // Elapsed-time readout; tabular figures keep the digits from jittering as
  // the timer ticks.
  &__time {
    color: $color-gray-60;
    font-variant: tabular-nums;
    line-height: 36px;
    padding: 0 10px;

    @keyframes pulse {
      0% {
        opacity: 0;
      }
      50% {
        opacity: 1;
      }
      100% {
        opacity: 0;
      }
    }

    // Pulsing red "recording" dot to the left of the time.
    &::before {
      content: '';
      display: inline-block;
      border-radius: 10px;
      width: 10px;
      height: 10px;
      background: $color-accent-red;
      margin-right: 10px;
      opacity: 0;
      animation: pulse 2s infinite;
    }
  }
}

View file

@ -11,6 +11,7 @@
position: absolute;
text-align: center;
transform: translate(-50%, 0);
user-select: none;
z-index: 100;
@include light-theme {

View file

@ -30,6 +30,7 @@
@import './components/AddGroupMembersModal.scss';
@import './components/AnnouncementsOnlyGroupBanner.scss';
@import './components/App.scss';
@import './components/AudioCapture.scss';
@import './components/Avatar.scss';
@import './components/AvatarEditor.scss';
@import './components/AvatarModalButtons.scss';

View file

@ -284,11 +284,6 @@
src="../js/views/group_member_list_view.js"
data-cover
></script>
<script
type="text/javascript"
src="../js/views/recorder_view.js"
data-cover
></script>
<script
type="text/javascript"
src="../js/views/inbox_view.js"

View file

@ -1013,6 +1013,10 @@ export async function startApp(): Promise<void> {
actionCreators.audioPlayer,
store.dispatch
),
audioRecorder: bindActionCreators(
actionCreators.audioRecorder,
store.dispatch
),
calling: bindActionCreators(actionCreators.calling, store.dispatch),
composer: bindActionCreators(actionCreators.composer, store.dispatch),
conversations: bindActionCreators(
@ -1407,18 +1411,7 @@ export async function startApp(): Promise<void> {
// Open sticker picker - handled by component
// Begin recording voice note
if (
conversation &&
commandOrCtrl &&
shiftKey &&
(key === 'v' || key === 'V')
) {
conversation.trigger('begin-recording');
event.preventDefault();
event.stopPropagation();
return;
}
// Begin recording voice note - handled by component
// Archive or unarchive conversation
if (

View file

@ -19,23 +19,12 @@ const story = storiesOf('Components/CompositionArea', module);
// necessary for the add attachment button to render properly
story.addDecorator(storyFn => <div className="file-input">{storyFn()}</div>);
// necessary for the mic button to render properly
const micCellEl = new DOMParser().parseFromString(
`
<div class="capture-audio">
<button class="microphone"></button>
</div>
`,
'text/html'
).body.firstElementChild as HTMLElement;
const createProps = (overrideProps: Partial<Props> = {}): Props => ({
conversationId: '123',
i18n,
micCellEl,
addAttachment: action('addAttachment'),
addPendingAttachment: action('addPendingAttachment'),
conversationId: '123',
i18n,
onSendMessage: action('onSendMessage'),
processAttachments: action('processAttachments'),
removeAttachment: action('removeAttachment'),
@ -43,6 +32,12 @@ const createProps = (overrideProps: Partial<Props> = {}): Props => ({
draftAttachments: overrideProps.draftAttachments || [],
onClearAttachments: action('onClearAttachments'),
onClickAttachment: action('onClickAttachment'),
// AudioCapture
cancelRecording: action('cancelRecording'),
completeRecording: action('completeRecording'),
errorRecording: action('errorRecording'),
isRecording: Boolean(overrideProps.isRecording),
startRecording: action('startRecording'),
// StagedLinkPreview
linkPreviewLoading: Boolean(overrideProps.linkPreviewLoading),
linkPreviewResult: overrideProps.linkPreviewResult,
@ -57,7 +52,6 @@ const createProps = (overrideProps: Partial<Props> = {}): Props => ({
overrideProps.shouldSendHighQualityAttachments
),
// CompositionInput
onSubmit: action('onSubmit'),
onEditorStateChange: action('onEditorStateChange'),
onTextTooLong: action('onTextTooLong'),
draftText: overrideProps.draftText || undefined,

View file

@ -5,12 +5,14 @@ import React, {
MutableRefObject,
useCallback,
useEffect,
useLayoutEffect,
useRef,
useState,
} from 'react';
import { get, noop } from 'lodash';
import { get } from 'lodash';
import classNames from 'classnames';
import type { BodyRangeType, BodyRangesType } from '../types/Util';
import type { ErrorDialogAudioRecorderType } from '../state/ducks/audioRecorder';
import type { HandleAttachmentsProcessingArgsType } from '../util/handleAttachmentsProcessing';
import { Spinner } from './Spinner';
import { EmojiButton, Props as EmojiButtonProps } from './emoji/EmojiButton';
import {
@ -34,26 +36,25 @@ import {
GroupV2PendingApprovalActions,
PropsType as GroupV2PendingApprovalActionsPropsType,
} from './conversation/GroupV2PendingApprovalActions';
import { MandatoryProfileSharingActions } from './conversation/MandatoryProfileSharingActions';
import { countStickers } from './stickers/lib';
import { LocalizerType } from '../types/Util';
import { EmojiPickDataType } from './emoji/EmojiPicker';
import { AttachmentType, isImageAttachment } from '../types/Attachment';
import { AnnouncementsOnlyGroupBanner } from './AnnouncementsOnlyGroupBanner';
import { AttachmentList } from './conversation/AttachmentList';
import { AttachmentType, isImageAttachment } from '../types/Attachment';
import { AudioCapture } from './conversation/AudioCapture';
import { CompositionUpload } from './CompositionUpload';
import { ConversationType } from '../state/ducks/conversations';
import { EmojiPickDataType } from './emoji/EmojiPicker';
import { LinkPreviewWithDomain } from '../types/LinkPreview';
import { LocalizerType } from '../types/Util';
import { MandatoryProfileSharingActions } from './conversation/MandatoryProfileSharingActions';
import { MediaQualitySelector } from './MediaQualitySelector';
import { Quote, Props as QuoteProps } from './conversation/Quote';
import { StagedLinkPreview } from './conversation/StagedLinkPreview';
import { LinkPreviewWithDomain } from '../types/LinkPreview';
import { ConversationType } from '../state/ducks/conversations';
import { AnnouncementsOnlyGroupBanner } from './AnnouncementsOnlyGroupBanner';
import { CompositionUpload } from './CompositionUpload';
import type { HandleAttachmentsProcessingArgsType } from '../util/handleAttachmentsProcessing';
import { countStickers } from './stickers/lib';
export type CompositionAPIType = {
focusInput: () => void;
isDirty: () => boolean;
setDisabled: (disabled: boolean) => void;
setMicActive: (micActive: boolean) => void;
reset: InputApi['reset'];
resetEmojiResults: InputApi['resetEmojiResults'];
};
@ -72,27 +73,41 @@ export type OwnProps = Readonly<{
areWeAdmin?: boolean;
areWePending?: boolean;
areWePendingApproval?: boolean;
cancelRecording: () => unknown;
completeRecording: (
conversationId: string,
onSendAudioRecording?: (rec: AttachmentType) => unknown
) => unknown;
compositionApi?: MutableRefObject<CompositionAPIType>;
conversationId: string;
draftAttachments: ReadonlyArray<AttachmentType>;
errorDialogAudioRecorderType?: ErrorDialogAudioRecorderType;
errorRecording: (e: ErrorDialogAudioRecorderType) => unknown;
groupAdmins: Array<ConversationType>;
groupVersion?: 1 | 2;
i18n: LocalizerType;
isFetchingUUID?: boolean;
isGroupV1AndDisabled?: boolean;
isMissingMandatoryProfileSharing?: boolean;
isRecording: boolean;
isSMSOnly?: boolean;
left?: boolean;
linkPreviewLoading: boolean;
linkPreviewResult?: LinkPreviewWithDomain;
messageRequestsEnabled?: boolean;
micCellEl?: HTMLElement;
onClearAttachments(): unknown;
onClickAttachment(): unknown;
onClickQuotedMessage(): unknown;
onCloseLinkPreview(): unknown;
processAttachments: (options: HandleAttachmentsProcessingArgsType) => unknown;
onSelectMediaQuality(isHQ: boolean): unknown;
onSendMessage(options: {
draftAttachments?: ReadonlyArray<AttachmentType>;
mentions?: BodyRangesType;
message?: string;
timestamp?: number;
voiceNoteAttachment?: AttachmentType;
}): unknown;
openConversation(conversationId: string): unknown;
quotedMessageProps?: Omit<
QuoteProps,
@ -101,12 +116,12 @@ export type OwnProps = Readonly<{
removeAttachment: (conversationId: string, filePath: string) => unknown;
setQuotedMessage(message: undefined): unknown;
shouldSendHighQualityAttachments: boolean;
startRecording: () => unknown;
}>;
export type Props = Pick<
CompositionInputProps,
| 'sortedGroupMembers'
| 'onSubmit'
| 'onEditorStateChange'
| 'onTextTooLong'
| 'draftText'
@ -138,19 +153,13 @@ export type Props = Pick<
Pick<GroupV2PendingApprovalActionsPropsType, 'onCancelJoinRequest'> &
OwnProps;
const emptyElement = (el: HTMLElement) => {
// Necessary to deal with Backbone views
// eslint-disable-next-line no-param-reassign
el.innerHTML = '';
};
export const CompositionArea = ({
// Base props
addAttachment,
addPendingAttachment,
conversationId,
i18n,
micCellEl,
onSendMessage,
processAttachments,
removeAttachment,
@ -158,6 +167,13 @@ export const CompositionArea = ({
draftAttachments,
onClearAttachments,
onClickAttachment,
// AudioCapture
cancelRecording,
completeRecording,
errorDialogAudioRecorderType,
errorRecording,
isRecording,
startRecording,
// StagedLinkPreview
linkPreviewLoading,
linkPreviewResult,
@ -170,7 +186,6 @@ export const CompositionArea = ({
onSelectMediaQuality,
shouldSendHighQualityAttachments,
// CompositionInput
onSubmit,
compositionApi,
onEditorStateChange,
onTextTooLong,
@ -227,7 +242,6 @@ export const CompositionArea = ({
isFetchingUUID,
}: Props): JSX.Element => {
const [disabled, setDisabled] = useState(false);
const [micActive, setMicActive] = useState(false);
const [dirty, setDirty] = useState(false);
const [large, setLarge] = useState(false);
const inputApiRef = useRef<InputApi | undefined>();
@ -240,12 +254,17 @@ export const CompositionArea = ({
}
}, [inputApiRef, setLarge]);
const handleSubmit = useCallback<typeof onSubmit>(
(...args) => {
const handleSubmit = useCallback(
(message: string, mentions: Array<BodyRangeType>, timestamp: number) => {
setLarge(false);
onSubmit(...args);
onSendMessage({
draftAttachments,
mentions,
message,
timestamp,
});
},
[setLarge, onSubmit]
[draftAttachments, onSendMessage, setLarge]
);
const launchAttachmentPicker = () => {
@ -279,7 +298,6 @@ export const CompositionArea = ({
isDirty: () => dirty,
focusInput,
setDisabled,
setMicActive,
reset: () => {
if (inputApiRef.current) {
inputApiRef.current.reset();
@ -309,19 +327,6 @@ export const CompositionArea = ({
const shouldShowMicrophone = !draftAttachments.length && !draftText;
// The following is a work-around to allow react to lay-out backbone-managed
// dom nodes until those functions are in React
const micCellRef = useRef<HTMLDivElement>(null);
useLayoutEffect(() => {
const { current: micCellContainer } = micCellRef;
if (micCellContainer && micCellEl) {
emptyElement(micCellContainer);
micCellContainer.appendChild(micCellEl);
}
return noop;
}, [micCellRef, micCellEl, large, dirty, shouldShowMicrophone]);
const showMediaQualitySelector = draftAttachments.some(isImageAttachment);
const leftHandSideButtonsFragment = (
@ -350,16 +355,19 @@ export const CompositionArea = ({
);
const micButtonFragment = shouldShowMicrophone ? (
<div
className={classNames(
'CompositionArea__button-cell',
micActive ? 'CompositionArea__button-cell--mic-active' : null,
large ? 'CompositionArea__button-cell--large-right' : null,
micActive && large
? 'CompositionArea__button-cell--large-right-mic-active'
: null
)}
ref={micCellRef}
<AudioCapture
cancelRecording={cancelRecording}
completeRecording={completeRecording}
conversationId={conversationId}
draftAttachments={draftAttachments}
errorDialogAudioRecorderType={errorDialogAudioRecorderType}
errorRecording={errorRecording}
i18n={i18n}
isRecording={isRecording}
onSendAudioRecording={(voiceNoteAttachment: AttachmentType) => {
onSendMessage({ voiceNoteAttachment });
}}
startRecording={startRecording}
/>
) : null;

View file

@ -0,0 +1,66 @@
// Copyright 2020 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import * as React from 'react';
import { action } from '@storybook/addon-actions';
import { storiesOf } from '@storybook/react';
import { boolean } from '@storybook/addon-knobs';
import { ErrorDialogAudioRecorderType } from '../../state/ducks/audioRecorder';
import { AudioCapture, PropsType } from './AudioCapture';
import { setupI18n } from '../../util/setupI18n';
import enMessages from '../../../_locales/en/messages.json';
const i18n = setupI18n('en', enMessages);
const story = storiesOf('Components/Conversation/AudioCapture', module);
// Builds a complete PropsType for the stories: every callback is a logged
// storybook action, and individual stories override only the fields they
// care about (recording state and the error-dialog type).
const createProps = (overrideProps: Partial<PropsType> = {}): PropsType => ({
  cancelRecording: action('cancelRecording'),
  completeRecording: action('completeRecording'),
  conversationId: '123',
  draftAttachments: [],
  errorDialogAudioRecorderType: overrideProps.errorDialogAudioRecorderType,
  errorRecording: action('errorRecording'),
  i18n,
  // Exposed as a storybook knob so it can be toggled live.
  isRecording: boolean('isRecording', overrideProps.isRecording || false),
  onSendAudioRecording: action('onSendAudioRecording'),
  startRecording: action('startRecording'),
});
story.add('Default', () => {
return <AudioCapture {...createProps()} />;
});
story.add('Recording', () => {
return (
<AudioCapture
{...createProps({
isRecording: true,
})}
/>
);
});
story.add('Voice Limit', () => {
return (
<AudioCapture
{...createProps({
errorDialogAudioRecorderType: ErrorDialogAudioRecorderType.Timeout,
isRecording: true,
})}
/>
);
});
story.add('Switched Apps', () => {
return (
<AudioCapture
{...createProps({
errorDialogAudioRecorderType: ErrorDialogAudioRecorderType.Blur,
isRecording: true,
})}
/>
);
});

View file

@ -0,0 +1,227 @@
// Copyright 2016-2020 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import React, { useCallback, useEffect, useMemo, useState } from 'react';
import * as moment from 'moment';
import { noop } from 'lodash';
import { AttachmentType } from '../../types/Attachment';
import { ConfirmationDialog } from '../ConfirmationDialog';
import { LocalizerType } from '../../types/Util';
import { ErrorDialogAudioRecorderType } from '../../state/ducks/audioRecorder';
import { ToastVoiceNoteLimit } from '../ToastVoiceNoteLimit';
import { ToastVoiceNoteMustBeOnlyAttachment } from '../ToastVoiceNoteMustBeOnlyAttachment';
import { useEscapeHandling } from '../../hooks/useEscapeHandling';
import {
getStartRecordingShortcut,
useKeyboardShortcuts,
} from '../../hooks/useKeyboardShortcuts';
type OnSendAudioRecordingType = (rec: AttachmentType) => unknown;
export type PropsType = {
cancelRecording: () => unknown;
conversationId: string;
completeRecording: (
conversationId: string,
onSendAudioRecording?: OnSendAudioRecordingType
) => unknown;
draftAttachments: ReadonlyArray<AttachmentType>;
errorDialogAudioRecorderType?: ErrorDialogAudioRecorderType;
errorRecording: (e: ErrorDialogAudioRecorderType) => unknown;
i18n: LocalizerType;
isRecording: boolean;
onSendAudioRecording: OnSendAudioRecordingType;
startRecording: () => unknown;
};
// Which transient toast (if any) is currently shown by the capture UI.
enum ToastType {
  VoiceNoteLimit,
  VoiceNoteMustBeOnlyAttachment,
}

// Initial mm:ss label shown before the first timer tick.
const START_DURATION_TEXT = '0:00';

/**
 * Voice-note capture UI. Renders the idle microphone button when not
 * recording, and the complete/time/cancel controls while recording.
 * Recording state itself lives in the audioRecorder redux duck; this
 * component only dispatches the provided callbacks and renders accordingly.
 */
export const AudioCapture = ({
  cancelRecording,
  completeRecording,
  conversationId,
  draftAttachments,
  errorDialogAudioRecorderType,
  errorRecording,
  i18n,
  isRecording,
  onSendAudioRecording,
  startRecording,
}: PropsType): JSX.Element => {
  const [durationText, setDurationText] = useState<string>(START_DURATION_TEXT);
  const [toastType, setToastType] = useState<ToastType | undefined>();

  // Cancel recording if we switch away from this conversation, unmounting
  useEffect(() => {
    if (!isRecording) {
      return;
    }

    return () => {
      cancelRecording();
    };
  }, [cancelRecording, isRecording]);

  // Stop recording and show confirmation if user switches away from this app
  useEffect(() => {
    if (!isRecording) {
      return;
    }

    const handler = () => {
      errorRecording(ErrorDialogAudioRecorderType.Blur);
    };
    window.addEventListener('blur', handler);
    return () => {
      window.removeEventListener('blur', handler);
    };
    // NOTE(review): completeRecording is listed as a dep but not used in this
    // effect — likely left over from a refactor; confirm before removing.
  }, [isRecording, completeRecording, errorRecording]);

  // Escape discards an in-progress recording.
  const escapeRecording = useCallback(() => {
    if (!isRecording) {
      return;
    }

    cancelRecording();
  }, [cancelRecording, isRecording]);

  useEscapeHandling(escapeRecording);

  // Cmd/Ctrl+Shift+V starts a recording (see useKeyboardShortcuts).
  const startRecordingShortcut = useMemo(() => {
    return getStartRecordingShortcut(startRecording);
  }, [startRecording]);

  useKeyboardShortcuts(startRecordingShortcut);

  // Update timestamp regularly, then timeout if recording goes over five minutes
  useEffect(() => {
    if (!isRecording) {
      return;
    }

    const startTime = Date.now();
    const interval = setInterval(() => {
      const duration = moment.duration(Date.now() - startTime, 'ms');
      // Minutes are not zero-padded; seconds are, to keep mm:ss shape.
      const minutes = `${Math.trunc(duration.asMinutes())}`;
      let seconds = `${duration.seconds()}`;
      if (seconds.length < 2) {
        seconds = `0${seconds}`;
      }
      setDurationText(`${minutes}:${seconds}`);

      // NOTE(review): relational compare of Duration objects relies on
      // moment's Duration#valueOf (milliseconds) — confirm the bundled moment
      // version provides it; duration.asMinutes() >= 5 would be explicit.
      if (duration >= moment.duration(5, 'minutes')) {
        errorRecording(ErrorDialogAudioRecorderType.Timeout);
      }
    }, 1000);

    return () => {
      clearInterval(interval);
    };
  }, [completeRecording, errorRecording, isRecording, setDurationText]);

  const clickCancel = useCallback(() => {
    cancelRecording();
  }, [cancelRecording]);

  const clickSend = useCallback(() => {
    completeRecording(conversationId, onSendAudioRecording);
  }, [conversationId, completeRecording, onSendAudioRecording]);

  function closeToast() {
    setToastType(undefined);
  }

  let toastElement: JSX.Element | undefined;
  if (toastType === ToastType.VoiceNoteLimit) {
    toastElement = <ToastVoiceNoteLimit i18n={i18n} onClose={closeToast} />;
  } else if (toastType === ToastType.VoiceNoteMustBeOnlyAttachment) {
    toastElement = (
      <ToastVoiceNoteMustBeOnlyAttachment i18n={i18n} onClose={closeToast} />
    );
  }

  // An error type maps to a confirmation dialog asking whether to send the
  // partial recording anyway or discard it.
  let confirmationDialogText: string | undefined;
  if (errorDialogAudioRecorderType === ErrorDialogAudioRecorderType.Blur) {
    confirmationDialogText = i18n('voiceRecordingInterruptedBlur');
  } else if (
    errorDialogAudioRecorderType === ErrorDialogAudioRecorderType.Timeout
  ) {
    confirmationDialogText = i18n('voiceRecordingInterruptedMax');
  }

  // Active-recording view: send button, elapsed time, cancel button.
  if (isRecording && !confirmationDialogText) {
    return (
      <>
        <div className="AudioCapture">
          <button
            className="AudioCapture__recorder-button AudioCapture__recorder-button--complete"
            onClick={clickSend}
            tabIndex={0}
            title={i18n('voiceRecording--complete')}
            type="button"
          >
            <span className="icon" />
          </button>
          <span className="AudioCapture__time">{durationText}</span>
          <button
            className="AudioCapture__recorder-button AudioCapture__recorder-button--cancel"
            onClick={clickCancel}
            tabIndex={0}
            title={i18n('voiceRecording--cancel')}
            type="button"
          >
            <span className="icon" />
          </button>
        </div>
        {toastElement}
      </>
    );
  }

  // Idle view: microphone button, plus the interruption dialog when an
  // error type is set.
  return (
    <>
      <div className="AudioCapture">
        <button
          aria-label={i18n('voiceRecording--start')}
          className="AudioCapture__microphone"
          onClick={() => {
            if (draftAttachments.length) {
              // A voice note must be the only attachment on a message.
              setToastType(ToastType.VoiceNoteMustBeOnlyAttachment);
            } else {
              setDurationText(START_DURATION_TEXT);
              // Shown at start to warn about the recording time limit.
              setToastType(ToastType.VoiceNoteLimit);
              startRecording();
            }
          }}
          title={i18n('voiceRecording--start')}
          type="button"
        />
        {confirmationDialogText ? (
          <ConfirmationDialog
            i18n={i18n}
            onCancel={clickCancel}
            onClose={noop}
            cancelText={i18n('discard')}
            actions={[
              {
                text: i18n('sendAnyway'),
                style: 'affirmative',
                action: clickSend,
              },
            ]}
          >
            {confirmationDialogText}
          </ConfirmationDialog>
        ) : null}
      </div>
      {toastElement}
    </>
  );
};

View file

@ -0,0 +1,47 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { useEffect } from 'react';
import { get } from 'lodash';
type KeyboardShortcutHandlerType = (ev: KeyboardEvent) => boolean;
// True when the platform-appropriate command modifier is held for this
// keyboard event: ⌘ (meta) on macOS, Ctrl everywhere else.
function isCmdOrCtrl(ev: KeyboardEvent): boolean {
  const isMacOS = get(window, 'platform') === 'darwin';
  return isMacOS ? ev.metaKey : ev.ctrlKey;
}
// Builds a keydown handler that triggers `startAudioRecording` on
// Cmd/Ctrl+Shift+V. Returns true (event claimed and suppressed) when the
// shortcut fires, false otherwise.
export function getStartRecordingShortcut(
  startAudioRecording: () => unknown
): KeyboardShortcutHandlerType {
  return (ev: KeyboardEvent): boolean => {
    if (ev.key !== 'v' && ev.key !== 'V') {
      return false;
    }
    if (!ev.shiftKey || !isCmdOrCtrl(ev)) {
      return false;
    }

    startAudioRecording();
    ev.preventDefault();
    ev.stopPropagation();

    return true;
  };
}
/**
 * Installs a document-level keydown listener that offers the event to each
 * handler in order; the first handler returning true claims it.
 *
 * Fix: the previous dependency list was `[eventHandlers]`, but a rest
 * parameter produces a brand-new array on every call, so the listener was
 * torn down and re-added on every render. Passing the handlers themselves as
 * the dependency list re-subscribes only when a handler identity changes.
 */
export function useKeyboardShortcuts(
  ...eventHandlers: Array<KeyboardShortcutHandlerType>
): void {
  useEffect(() => {
    function handleKeydown(ev: KeyboardEvent): void {
      eventHandlers.some(eventHandler => eventHandler(ev));
    }

    document.addEventListener('keydown', handleKeydown);
    return () => {
      document.removeEventListener('keydown', handleKeydown);
    };
    // Spread the handlers into the dependency list; the rest-args array's
    // own identity is fresh every render and must not be depended upon.
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, eventHandlers);
}

View file

@ -0,0 +1,132 @@
// Copyright 2016-2020 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import * as log from '../logging/log';
import { WebAudioRecorderClass } from '../window.d';
// Owns the microphone-capture pipeline for voice notes: an AudioContext,
// a gain node feeding WebAudioRecorder (mp3 encoding in a web worker), and
// the MediaStream source from getUserMedia. A single shared instance is
// exported below; callers drive it with start() / stop() / clear().
export class RecorderClass {
  private context?: AudioContext;
  private input?: GainNode;
  private recorder?: WebAudioRecorderClass;
  private source?: MediaStreamAudioSourceNode;
  private blob?: Blob;
  // Pending resolver for the promise handed out by stop(); fulfilled by
  // onComplete once the encoder finishes.
  private resolve?: (blob: Blob) => void;

  // Tears everything down: cancels an in-flight recording, terminates the
  // encoder worker, disconnects the audio graph, and closes the context.
  // Safe to call at any time, including when nothing is recording.
  clear(): void {
    this.blob = undefined;
    this.resolve = undefined;

    if (this.source) {
      this.source.disconnect();
      this.source = undefined;
    }

    if (this.recorder) {
      if (this.recorder.isRecording()) {
        this.recorder.cancelRecording();
      }

      // Reach in and terminate the web worker used by WebAudioRecorder, otherwise
      // it gets leaked due to a reference cycle with its onmessage listener
      this.recorder.worker.terminate();
      this.recorder = undefined;
    }

    this.input = undefined;

    if (this.context) {
      this.context.close();
      this.context = undefined;
    }
  }

  // Begins a fresh recording, discarding any previous state first. Requests
  // microphone access and, once granted, wires the stream into the recorder.
  async start(): Promise<void> {
    this.clear();

    this.context = new AudioContext();
    this.input = this.context.createGain();

    this.recorder = new window.WebAudioRecorder(this.input, {
      encoding: 'mp3',
      workerDir: 'js/', // must end with slash
      options: {
        timeLimit: 360, // one minute more than our UI-imposed limit
      },
    });
    this.recorder.onComplete = this.onComplete.bind(this);
    this.recorder.onError = this.onError.bind(this);

    try {
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      // clear() may have run while we awaited the permission prompt.
      if (!this.context || !this.input) {
        this.onError(
          this.recorder,
          new Error('Recorder/getUserMedia/stream: Missing context or input!')
        );
        return;
      }
      this.source = this.context.createMediaStreamSource(stream);
      this.source.connect(this.input);
    } catch (err) {
      log.error('Recorder.onGetUserMediaError:', err);
      this.clear();
    }

    // clear() above resets this.recorder on failure, so this only starts
    // when setup succeeded.
    if (this.recorder) {
      this.recorder.startRecording();
    }
  }

  // Finishes the recording and resolves with the encoded blob (delivered via
  // onComplete). Returns the cached blob immediately if already finished;
  // returns undefined when no recording was in progress.
  async stop(): Promise<Blob | undefined> {
    if (!this.recorder) {
      log.warn('Recorder/stop: Called with no recorder');
      return;
    }

    if (this.blob) {
      return this.blob;
    }

    const promise = new Promise<Blob>(resolve => {
      this.resolve = resolve;
    });

    this.recorder.finishRecording();

    return promise;
  }

  // WebAudioRecorder callback: caches the encoded blob and resolves any
  // promise outstanding from stop().
  onComplete(_recorder: WebAudioRecorderClass, blob: Blob): void {
    this.blob = blob;
    this.resolve?.(blob);
  }

  // WebAudioRecorder callback: tears down state and either prompts for
  // microphone permission or logs the failure.
  onError(_recorder: WebAudioRecorderClass, error: Error): void {
    if (!this.recorder) {
      log.warn('Recorder/onError: Called with no recorder');
      return;
    }

    this.clear();

    if (error && error.name === 'NotAllowedError') {
      log.warn('Recorder/onError: Microphone permission missing');
      window.showPermissionsPopup();
    } else {
      log.error(
        'Recorder/onError:',
        error && error.stack ? error.stack : error
      );
    }
  }

  // Returns the last completed recording; throws if none has finished.
  getBlob(): Blob {
    if (!this.blob) {
      throw new Error('no blob found');
    }

    return this.blob;
  }
}

// Shared singleton used by the audioRecorder redux duck.
export const recorder = new RecorderClass();

View file

@ -4,6 +4,7 @@
import { actions as accounts } from './ducks/accounts';
import { actions as app } from './ducks/app';
import { actions as audioPlayer } from './ducks/audioPlayer';
import { actions as audioRecorder } from './ducks/audioRecorder';
import { actions as calling } from './ducks/calling';
import { actions as composer } from './ducks/composer';
import { actions as conversations } from './ducks/conversations';
@ -24,6 +25,7 @@ export const actionCreators: ReduxActions = {
accounts,
app,
audioPlayer,
audioRecorder,
calling,
composer,
conversations,
@ -44,6 +46,7 @@ export const mapDispatchToProps = {
...accounts,
...app,
...audioPlayer,
...audioRecorder,
...calling,
...composer,
...conversations,

View file

@ -0,0 +1,199 @@
// Copyright 2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { ThunkAction } from 'redux-thunk';
import * as log from '../../logging/log';
import { AttachmentType } from '../../types/Attachment';
import { SignalService as Proto } from '../../protobuf';
import { StateType as RootStateType } from '../reducer';
import { fileToBytes } from '../../util/fileToBytes';
import { recorder } from '../../services/audioRecorder';
import { stringToMIMEType } from '../../types/MIME';
import { useBoundActions } from '../../hooks/useBoundActions';
export enum ErrorDialogAudioRecorderType {
Blur,
Timeout,
}
// State
export type AudioPlayerStateType = {
readonly isRecording: boolean;
readonly errorDialogAudioRecorderType?: ErrorDialogAudioRecorderType;
};
// Actions
const CANCEL_RECORDING = 'audioRecorder/CANCEL_RECORDING';
const COMPLETE_RECORDING = 'audioRecorder/COMPLETE_RECORDING';
const ERROR_RECORDING = 'audioRecorder/ERROR_RECORDING';
const START_RECORDING = 'audioRecorder/START_RECORDING';
type CancelRecordingAction = {
type: typeof CANCEL_RECORDING;
payload: undefined;
};
type CompleteRecordingAction = {
type: typeof COMPLETE_RECORDING;
payload: undefined;
};
type ErrorRecordingAction = {
type: typeof ERROR_RECORDING;
payload: ErrorDialogAudioRecorderType;
};
type StartRecordingAction = {
type: typeof START_RECORDING;
payload: undefined;
};
type AudioPlayerActionType =
| CancelRecordingAction
| CompleteRecordingAction
| ErrorRecordingAction
| StartRecordingAction;
// Action Creators
export const actions = {
cancelRecording,
completeRecording,
errorRecording,
startRecording,
};
export const useActions = (): typeof actions => useBoundActions(actions);
// Thunk: kicks off microphone capture via the shared recorder service and
// flips isRecording on. Refuses to start when the composer already has
// attachments — a voice note must be the only attachment on a message.
// NOTE(review): recorder.start() is async and not awaited, so getUserMedia
// failures are handled inside the service, not here — confirm intended.
function startRecording(): ThunkAction<
  void,
  RootStateType,
  unknown,
  StartRecordingAction
> {
  return (dispatch, getState) => {
    if (getState().composer.attachments.length) {
      return;
    }

    recorder.start();

    dispatch({
      type: START_RECORDING,
      payload: undefined,
    });
  };
}
// Plain action builder for COMPLETE_RECORDING; dispatched by the
// completeRecording thunk once the blob has been handed off (or failed).
function completeRecordingAction(): CompleteRecordingAction {
  return { type: COMPLETE_RECORDING, payload: undefined };
}
// Thunk: finishes the in-progress recording and hands the encoded voice note
// to onSendAudioRecording. If the user has switched to a different
// conversation since recording began, the recording is cancelled instead of
// being sent into the wrong thread. The finally block guarantees a
// COMPLETE_RECORDING dispatch (resetting isRecording) even if encoding or
// the send callback throws.
function completeRecording(
  conversationId: string,
  onSendAudioRecording?: (rec: AttachmentType) => unknown
): ThunkAction<
  void,
  RootStateType,
  unknown,
  CancelRecordingAction | CompleteRecordingAction
> {
  return async (dispatch, getState) => {
    const state = getState();

    const isSelectedConversation =
      state.conversations.selectedConversationId === conversationId;

    if (!isSelectedConversation) {
      log.warn(
        'completeRecording: Recording started in one conversation and completed in another'
      );
      dispatch(cancelRecording());
      return;
    }

    const blob = await recorder.stop();

    try {
      if (!blob) {
        throw new Error('completeRecording: no blob returned');
      }
      const data = await fileToBytes(blob);

      // VOICE_MESSAGE flag marks this attachment as a voice note rather
      // than a generic audio file.
      const voiceNoteAttachment = {
        contentType: stringToMIMEType(blob.type),
        data,
        size: data.byteLength,
        flags: Proto.AttachmentPointer.Flags.VOICE_MESSAGE,
      };

      if (onSendAudioRecording) {
        onSendAudioRecording(voiceNoteAttachment);
      }
    } finally {
      dispatch(completeRecordingAction());
    }
  };
}
// Discards the current recording and resets recorder state.
// NOTE(review): performs a side effect (recorder.clear()) inside a plain
// action creator rather than a thunk — it runs at dispatch time, which works
// but is unconventional for Redux; consider converting to a thunk.
function cancelRecording(): CancelRecordingAction {
  recorder.clear();

  return {
    type: CANCEL_RECORDING,
    payload: undefined,
  };
}
// Stops capture (e.g. on window blur or timeout) and records which error
// dialog to show. The promise from recorder.stop() is deliberately ignored:
// the recorder caches the blob internally, so a later completeRecording can
// still send it if the user chooses "send anyway".
// NOTE(review): like cancelRecording, this runs a side effect in a plain
// action creator rather than a thunk.
function errorRecording(
  errorDialogAudioRecorderType: ErrorDialogAudioRecorderType
): ErrorRecordingAction {
  recorder.stop();

  return {
    type: ERROR_RECORDING,
    payload: errorDialogAudioRecorderType,
  };
}
// Reducer
// Initial slice state: nothing recording, no pending error dialog.
function getEmptyState(): AudioPlayerStateType {
  return { isRecording: false };
}
// Reducer for the audio-recorder slice. START sets isRecording; CANCEL and
// COMPLETE clear it; ERROR clears it and stores which dialog to show. Any
// unknown action leaves state untouched.
// NOTE(review): the slice's state type is named AudioPlayerStateType even
// though this is the audioRecorder duck — looks like a copy/paste holdover.
export function reducer(
  state: Readonly<AudioPlayerStateType> = getEmptyState(),
  action: Readonly<AudioPlayerActionType>
): AudioPlayerStateType {
  switch (action.type) {
    case START_RECORDING:
      return {
        ...state,
        errorDialogAudioRecorderType: undefined,
        isRecording: true,
      };
    case CANCEL_RECORDING:
    case COMPLETE_RECORDING:
      return {
        ...state,
        errorDialogAudioRecorderType: undefined,
        isRecording: false,
      };
    case ERROR_RECORDING:
      return {
        ...state,
        errorDialogAudioRecorderType: action.payload,
        isRecording: false,
      };
    default:
      return state;
  }
}

View file

@ -6,6 +6,7 @@ import { combineReducers } from 'redux';
import { reducer as accounts } from './ducks/accounts';
import { reducer as app } from './ducks/app';
import { reducer as audioPlayer } from './ducks/audioPlayer';
import { reducer as audioRecorder } from './ducks/audioRecorder';
import { reducer as calling } from './ducks/calling';
import { reducer as composer } from './ducks/composer';
import { reducer as conversations } from './ducks/conversations';
@ -26,6 +27,7 @@ export const reducer = combineReducers({
accounts,
app,
audioPlayer,
audioRecorder,
calling,
composer,
conversations,

View file

@ -80,6 +80,10 @@ const mapStateToProps = (state: StateType, props: ExternalProps) => {
// Base
conversationId: id,
i18n: getIntl(state),
// AudioCapture
errorDialogAudioRecorderType:
state.audioRecorder.errorDialogAudioRecorderType,
isRecording: state.audioRecorder.isRecording,
// AttachmentsList
draftAttachments,
// MediaQualitySelector

View file

@ -4,6 +4,7 @@
import { actions as accounts } from './ducks/accounts';
import { actions as app } from './ducks/app';
import { actions as audioPlayer } from './ducks/audioPlayer';
import { actions as audioRecorder } from './ducks/audioRecorder';
import { actions as calling } from './ducks/calling';
import { actions as composer } from './ducks/composer';
import { actions as conversations } from './ducks/conversations';
@ -23,6 +24,7 @@ export type ReduxActions = {
accounts: typeof accounts;
app: typeof app;
audioPlayer: typeof audioPlayer;
audioRecorder: typeof audioRecorder;
calling: typeof calling;
composer: typeof composer;
conversations: typeof conversations;

View file

@ -12281,29 +12281,6 @@
"reasonCategory": "usageTrusted",
"updated": "2021-07-30T16:57:33.618Z"
},
{
"rule": "DOM-innerHTML",
"path": "ts/components/CompositionArea.js",
"line": " el.innerHTML = '';",
"reasonCategory": "usageTrusted",
"updated": "2020-05-20T20:10:43.540Z",
"reasonDetail": "Our code, no user input, only clearing out the dom"
},
{
"rule": "DOM-innerHTML",
"path": "ts/components/CompositionArea.tsx",
"line": " el.innerHTML = '';",
"reasonCategory": "usageTrusted",
"updated": "2020-06-03T19:23:21.195Z",
"reasonDetail": "Our code, no user input, only clearing out the dom"
},
{
"rule": "React-useRef",
"path": "ts/components/CompositionArea.tsx",
"line": " const micCellRef = useRef<HTMLDivElement>(null);",
"reasonCategory": "usageTrusted",
"updated": "2021-07-30T16:57:33.618Z"
},
{
"rule": "React-useRef",
"path": "ts/components/CompositionArea.tsx",
@ -13952,76 +13929,6 @@
"reasonCategory": "usageTrusted",
"updated": "2021-09-15T21:07:50.995Z"
},
{
"rule": "jQuery-$(",
"path": "ts/views/recorder_view.js",
"line": " template: () => $('#recorder').html(),",
"reasonCategory": "usageTrusted",
"updated": "2021-09-15T21:07:50.995Z"
},
{
"rule": "jQuery-$(",
"path": "ts/views/recorder_view.js",
"line": " $(window).on('blur', this.onSwitchAwayBound);",
"reasonCategory": "usageTrusted",
"updated": "2021-09-15T21:07:50.995Z"
},
{
"rule": "jQuery-$(",
"path": "ts/views/recorder_view.js",
"line": " this.$('.time').text(`${minutes}:${seconds}`);",
"reasonCategory": "usageTrusted",
"updated": "2021-09-15T21:07:50.995Z"
},
{
"rule": "jQuery-$(",
"path": "ts/views/recorder_view.js",
"line": " $(window).off('blur', this.onSwitchAwayBound);",
"reasonCategory": "usageTrusted",
"updated": "2021-09-15T21:07:50.995Z"
},
{
"rule": "jQuery-html(",
"path": "ts/views/recorder_view.js",
"line": " template: () => $('#recorder').html(),",
"reasonCategory": "usageTrusted",
"updated": "2021-09-15T21:07:50.995Z"
},
{
"rule": "jQuery-$(",
"path": "ts/views/recorder_view.ts",
"line": " template: () => $('#recorder').html(),",
"reasonCategory": "usageTrusted",
"updated": "2021-09-15T21:07:50.995Z"
},
{
"rule": "jQuery-$(",
"path": "ts/views/recorder_view.ts",
"line": " $(window).on('blur', this.onSwitchAwayBound);",
"reasonCategory": "usageTrusted",
"updated": "2021-09-15T21:07:50.995Z"
},
{
"rule": "jQuery-$(",
"path": "ts/views/recorder_view.ts",
"line": " this.$('.time').text(`${minutes}:${seconds}`);",
"reasonCategory": "usageTrusted",
"updated": "2021-09-15T21:07:50.995Z"
},
{
"rule": "jQuery-$(",
"path": "ts/views/recorder_view.ts",
"line": " $(window).off('blur', this.onSwitchAwayBound);",
"reasonCategory": "usageTrusted",
"updated": "2021-09-15T21:07:50.995Z"
},
{
"rule": "jQuery-html(",
"path": "ts/views/recorder_view.ts",
"line": " template: () => $('#recorder').html(),",
"reasonCategory": "usageTrusted",
"updated": "2021-09-15T21:07:50.995Z"
},
{
"rule": "jQuery-$(",
"path": "ts/views/standalone_registration_view.js",

View file

@ -0,0 +1,30 @@
// Copyright 2020-2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import * as log from '../logging/log';
import { AttachmentType } from '../types/Attachment';
// Loads a draft attachment's bytes from disk so it can be sent.
//
// Returns a copy of the attachment with its `data` populated, or
// `undefined` when there is nothing to load (missing attachment, still
// pending, no on-disk path) or when the on-disk size no longer matches
// the recorded size.
export async function resolveAttachmentDraftData(
  attachment?: AttachmentType
): Promise<AttachmentType | undefined> {
  // Nothing to resolve for a missing, still-pending, or path-less draft.
  if (!attachment || attachment.pending || !attachment.path) {
    return undefined;
  }

  const onDiskData = await window.Signal.Migrations.readDraftData(
    attachment.path
  );

  // A size mismatch means the draft file on disk is not what we recorded;
  // refuse to send it rather than send corrupt data.
  if (onDiskData.byteLength !== attachment.size) {
    log.error(
      `Attachment size from disk ${onDiskData.byteLength} did not match attachment size ${attachment.size}`
    );
    return undefined;
  }

  return { ...attachment, data: onDiskData };
}

View file

@ -65,24 +65,18 @@ import {
LinkPreviewWithDomain,
} from '../types/LinkPreview';
import * as LinkPreview from '../types/LinkPreview';
import { SignalService as Proto } from '../protobuf';
import { autoScale } from '../util/handleImageAttachment';
import { ReadStatus } from '../messages/MessageReadStatus';
import { markViewed } from '../services/MessageUpdater';
import { viewedReceiptsJobQueue } from '../jobs/viewedReceiptsJobQueue';
import { viewSyncJobQueue } from '../jobs/viewSyncJobQueue';
import type { EmbeddedContactType } from '../types/EmbeddedContact';
import * as VisualAttachment from '../types/VisualAttachment';
import type { AnyViewClass, BasicReactWrapperViewClass } from '../window.d';
import { isNotNil } from '../util/isNotNil';
import { dropNull } from '../util/dropNull';
import { CompositionAPIType } from '../components/CompositionArea';
import * as log from '../logging/log';
import { openLinkInWebBrowser } from '../util/openLinkInWebBrowser';
import { ToastCannotStartGroupCall } from '../components/ToastCannotStartGroupCall';
import { showToast } from '../util/showToast';
import type { AnyViewClass, BasicReactWrapperViewClass } from '../window.d';
import type { EmbeddedContactType } from '../types/EmbeddedContact';
import { AttachmentToastType } from '../types/AttachmentToastType';
import { CompositionAPIType } from '../components/CompositionArea';
import { ReadStatus } from '../messages/MessageReadStatus';
import { SignalService as Proto } from '../protobuf';
import { ToastBlocked } from '../components/ToastBlocked';
import { ToastBlockedGroup } from '../components/ToastBlockedGroup';
import { ToastCannotMixImageAndNonImageAttachments } from '../components/ToastCannotMixImageAndNonImageAttachments';
import { ToastCannotStartGroupCall } from '../components/ToastCannotStartGroupCall';
import { ToastConversationArchived } from '../components/ToastConversationArchived';
import { ToastConversationMarkedUnread } from '../components/ToastConversationMarkedUnread';
import { ToastConversationUnarchived } from '../components/ToastConversationUnarchived';
@ -90,29 +84,34 @@ import { ToastDangerousFileType } from '../components/ToastDangerousFileType';
import { ToastDeleteForEveryoneFailed } from '../components/ToastDeleteForEveryoneFailed';
import { ToastExpired } from '../components/ToastExpired';
import { ToastFileSaved } from '../components/ToastFileSaved';
import { ToastFileSize } from '../components/ToastFileSize';
import { ToastInvalidConversation } from '../components/ToastInvalidConversation';
import { ToastLeftGroup } from '../components/ToastLeftGroup';
import { ToastMaxAttachments } from '../components/ToastMaxAttachments';
import { ToastMessageBodyTooLong } from '../components/ToastMessageBodyTooLong';
import { ToastOneNonImageAtATime } from '../components/ToastOneNonImageAtATime';
import { ToastOriginalMessageNotFound } from '../components/ToastOriginalMessageNotFound';
import { ToastPinnedConversationsFull } from '../components/ToastPinnedConversationsFull';
import { ToastReactionFailed } from '../components/ToastReactionFailed';
import { ToastReportedSpamAndBlocked } from '../components/ToastReportedSpamAndBlocked';
import { ToastTapToViewExpiredIncoming } from '../components/ToastTapToViewExpiredIncoming';
import { ToastTapToViewExpiredOutgoing } from '../components/ToastTapToViewExpiredOutgoing';
import { ToastVoiceNoteLimit } from '../components/ToastVoiceNoteLimit';
import { ToastVoiceNoteMustBeOnlyAttachment } from '../components/ToastVoiceNoteMustBeOnlyAttachment';
import { copyGroupLink } from '../util/copyGroupLink';
import { fileToBytes } from '../util/fileToBytes';
import { AttachmentToastType } from '../types/AttachmentToastType';
import { ToastCannotMixImageAndNonImageAttachments } from '../components/ToastCannotMixImageAndNonImageAttachments';
import { ToastFileSize } from '../components/ToastFileSize';
import { ToastMaxAttachments } from '../components/ToastMaxAttachments';
import { ToastOneNonImageAtATime } from '../components/ToastOneNonImageAtATime';
import { ToastUnableToLoadAttachment } from '../components/ToastUnableToLoadAttachment';
import { autoScale } from '../util/handleImageAttachment';
import { copyGroupLink } from '../util/copyGroupLink';
import { deleteDraftAttachment } from '../util/deleteDraftAttachment';
import { markAllAsApproved } from '../util/markAllAsApproved';
import { markAllAsVerifiedDefault } from '../util/markAllAsVerifiedDefault';
import { retryMessageSend } from '../util/retryMessageSend';
import { dropNull } from '../util/dropNull';
import { fileToBytes } from '../util/fileToBytes';
import { isNotNil } from '../util/isNotNil';
import { markViewed } from '../services/MessageUpdater';
import { openLinkInWebBrowser } from '../util/openLinkInWebBrowser';
import { resolveAttachmentDraftData } from '../util/resolveAttachmentDraftData';
import { showToast } from '../util/showToast';
import { viewSyncJobQueue } from '../jobs/viewSyncJobQueue';
import { viewedReceiptsJobQueue } from '../jobs/viewedReceiptsJobQueue';
type AttachmentOptions = {
messageId: string;
@ -137,7 +136,6 @@ const {
loadStickerData,
openFileInFolder,
readAttachmentData,
readDraftData,
saveAttachmentToDisk,
upgradeMessageSchema,
} = window.Signal.Migrations;
@ -230,7 +228,6 @@ export class ConversationView extends window.Backbone.View<ConversationModel> {
current?: CompositionAPIType;
} = { current: undefined };
private sendStart?: number;
private voiceNoteAttachment?: AttachmentType;
// Quotes
private quote?: QuotedMessageType;
@ -245,7 +242,6 @@ export class ConversationView extends window.Backbone.View<ConversationModel> {
// Sub-views
private captionEditorView?: Backbone.View;
private captureAudioView?: Backbone.View;
private compositionAreaView?: Backbone.View;
private contactModalView?: Backbone.View;
private forwardMessageModal?: Backbone.View;
@ -291,7 +287,6 @@ export class ConversationView extends window.Backbone.View<ConversationModel> {
// These are triggered by background.ts for keyboard handling
this.listenTo(this.model, 'focus-composer', this.focusMessageField);
this.listenTo(this.model, 'open-all-media', this.showAllMedia);
this.listenTo(this.model, 'begin-recording', this.captureAudio);
this.listenTo(this.model, 'attach-file', this.onChooseAttachment);
this.listenTo(this.model, 'escape-pressed', this.resetPanel);
this.listenTo(this.model, 'show-message-details', this.showMessageDetail);
@ -328,7 +323,6 @@ export class ConversationView extends window.Backbone.View<ConversationModel> {
// eslint-disable-next-line class-methods-use-this
events(): Record<string, string> {
return {
'click .capture-audio .microphone': 'captureAudio',
'change input.file-input': 'onChoseAttachment',
drop: 'onDrop',
@ -510,12 +504,6 @@ export class ConversationView extends window.Backbone.View<ConversationModel> {
setupCompositionArea(): void {
window.reduxActions.composer.resetComposer();
const micCellEl = $(`
<div class="capture-audio">
<button class="microphone"></button>
</div>
`)[0];
const messageRequestEnum = Proto.SyncMessage.MessageRequestResponse.Type;
const props = {
@ -524,11 +512,6 @@ export class ConversationView extends window.Backbone.View<ConversationModel> {
onClickAddPack: () => this.showStickerManager(),
onPickSticker: (packId: string, stickerId: number) =>
this.sendStickerMessage({ packId, stickerId }),
onSubmit: (
message: string,
mentions: BodyRangesType,
timestamp: number
) => this.sendMessage(message, mentions, { timestamp }),
onEditorStateChange: (
msg: string,
bodyRanges: Array<BodyRangeType>,
@ -537,7 +520,6 @@ export class ConversationView extends window.Backbone.View<ConversationModel> {
onTextTooLong: () => showToast(ToastMessageBodyTooLong),
getQuotedMessage: () => this.model.get('quotedMessageId'),
clearQuotedMessage: () => this.setQuoteMessage(null),
micCellEl,
onAccept: () => {
this.syncMessageRequestResponse(
'onAccept',
@ -600,6 +582,26 @@ export class ConversationView extends window.Backbone.View<ConversationModel> {
},
openConversation: this.openConversation.bind(this),
onSendMessage: ({
draftAttachments,
mentions = [],
message = '',
timestamp,
voiceNoteAttachment,
}: {
draftAttachments?: ReadonlyArray<AttachmentType>;
mentions?: BodyRangesType;
message?: string;
timestamp?: number;
voiceNoteAttachment?: AttachmentType;
}): void => {
this.sendMessage(message, mentions, {
draftAttachments,
timestamp,
voiceNoteAttachment,
});
},
};
this.compositionAreaView = new Whisper.ReactWrapperView({
@ -1408,9 +1410,6 @@ export class ConversationView extends window.Backbone.View<ConversationModel> {
if (this.stickerPreviewModalView) {
this.stickerPreviewModalView.remove();
}
if (this.captureAudioView) {
this.captureAudioView.remove();
}
if (this.lightboxView) {
this.lightboxView.remove();
}
@ -1582,8 +1581,6 @@ export class ConversationView extends window.Backbone.View<ConversationModel> {
}
async clearAttachments(): Promise<void> {
this.voiceNoteAttachment = undefined;
const draftAttachments = this.model.get('draftAttachments') || [];
this.model.set({
draftAttachments: [],
@ -1610,46 +1607,6 @@ export class ConversationView extends window.Backbone.View<ConversationModel> {
return draftAttachments.some(item => !item.pending);
}
async getFiles(): Promise<Array<AttachmentType>> {
if (this.voiceNoteAttachment) {
// We don't need to pull these off disk; we return them as-is
return [this.voiceNoteAttachment];
}
const draftAttachments = this.model.get('draftAttachments') || [];
const items = await Promise.all(
draftAttachments.map(attachment => this.getFile(attachment))
);
return items.filter(isNotNil);
}
// eslint-disable-next-line class-methods-use-this
async getFile(
attachment?: AttachmentType
): Promise<AttachmentType | undefined> {
if (!attachment || attachment.pending) {
return;
}
if (!attachment.path) {
return;
}
const data = await readDraftData(attachment.path);
if (data.byteLength !== attachment.size) {
log.error(
`Attachment size from disk ${data.byteLength} did not match attachment size ${attachment.size}`
);
return;
}
return {
...attachment,
data,
};
}
updateAttachmentsView(): void {
const draftAttachments = this.model.get('draftAttachments') || [];
window.reduxActions.composer.replaceAttachments(
@ -1661,87 +1618,6 @@ export class ConversationView extends window.Backbone.View<ConversationModel> {
}
}
// Entry point for recording a voice message (mic button click or the
// 'begin-recording' keyboard shortcut). Bails out if the composer already
// has content, then mounts the legacy Backbone RecorderView and locks the
// composition area while recording.
captureAudio(e?: Event): void {
  if (e) {
    e.preventDefault();
  }

  // Don't clobber a message the user is already composing.
  if (this.compositionApi.current?.isDirty()) {
    return;
  }

  // A voice note must be the only attachment on a message.
  if (this.hasFiles({ includePending: true })) {
    showToast(ToastVoiceNoteMustBeOnlyAttachment);
    return;
  }

  showToast(ToastVoiceNoteLimit);

  // Note - clicking anywhere will close the audio capture panel, due to
  // the onClick handler in InboxView, which calls its closeRecording method.

  // Tear down any recorder left over from a previous attempt before
  // creating a fresh one.
  if (this.captureAudioView) {
    this.captureAudioView.remove();
    this.captureAudioView = undefined;
  }

  this.captureAudioView = new Whisper.RecorderView();

  const view = this.captureAudioView;
  view.render();
  // 'send' fires when the user clicks finish; 'confirm' when the recording
  // was interrupted (blur or time limit); 'closed' when the view removes
  // itself for any reason.
  view.on('send', this.handleAudioCapture.bind(this));
  view.on('confirm', this.handleAudioConfirm.bind(this));
  view.on('closed', this.endCaptureAudio.bind(this));
  view.$el.appendTo(this.$('.capture-audio'));
  view.$('.finish').focus();
  this.compositionApi.current?.setMicActive(true);

  // Disable the text field and hide the mic button until recording ends.
  this.disableMessageField();
  this.$('.microphone').hide();
}
handleAudioConfirm(blob: Blob, lostFocus?: boolean): void {
window.showConfirmationDialog({
confirmStyle: 'negative',
cancelText: window.i18n('discard'),
message: lostFocus
? window.i18n('voiceRecordingInterruptedBlur')
: window.i18n('voiceRecordingInterruptedMax'),
okText: window.i18n('sendAnyway'),
resolve: async () => {
await this.handleAudioCapture(blob);
},
});
}
// Finalize a recorded voice note: convert the encoded blob to bytes, stash
// it as the pending voice-note attachment, and send the message right away.
// Throws if other attachments are present, since a voice note must be the
// only attachment on a message.
async handleAudioCapture(blob: Blob): Promise<void> {
  if (this.hasFiles({ includePending: true })) {
    throw new Error('A voice note cannot be sent with other attachments');
  }

  const bytes = await fileToBytes(blob);

  // These aren't persisted to disk; they are meant to be sent immediately
  this.voiceNoteAttachment = {
    contentType: stringToMIMEType(blob.type),
    data: bytes,
    flags: Proto.AttachmentPointer.Flags.VOICE_MESSAGE,
    size: bytes.byteLength,
  };

  // Note: The RecorderView removes itself on send
  this.captureAudioView = undefined;

  this.sendMessage();
}
// Restore the composition area after the recorder closes for any reason.
endCaptureAudio(): void {
  // Note: The RecorderView removes itself on close
  this.captureAudioView = undefined;

  // Re-enable the text field, bring the mic button back, and tell the
  // composition API the mic is no longer active.
  this.enableMessageField();
  this.$('.microphone').show();
  this.compositionApi.current?.setMicActive(false);
}
async onOpened(messageId: string): Promise<void> {
if (messageId) {
const message = await getMessageById(messageId, {
@ -3377,7 +3253,12 @@ export class ConversationView extends window.Backbone.View<ConversationModel> {
async sendMessage(
message = '',
mentions: BodyRangesType = [],
options: { timestamp?: number; force?: boolean } = {}
options: {
draftAttachments?: ReadonlyArray<AttachmentType>;
force?: boolean;
timestamp?: number;
voiceNoteAttachment?: AttachmentType;
} = {}
): Promise<void> {
const { model }: { model: ConversationModel } = this;
const timestamp = options.timestamp || Date.now();
@ -3418,12 +3299,22 @@ export class ConversationView extends window.Backbone.View<ConversationModel> {
if (
!message.length &&
!this.hasFiles({ includePending: false }) &&
!this.voiceNoteAttachment
!options.voiceNoteAttachment
) {
return;
}
const attachments = await this.getFiles();
let attachments: Array<AttachmentType> = [];
if (options.voiceNoteAttachment) {
attachments = [options.voiceNoteAttachment];
} else if (options.draftAttachments) {
attachments = (
await Promise.all(
options.draftAttachments.map(resolveAttachmentDraftData)
)
).filter(isNotNil);
}
const sendHQImages =
window.reduxStore &&
window.reduxStore.getState().composer.shouldSendHighQualityAttachments;

View file

@ -1,143 +0,0 @@
// Copyright 2016-2021 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import moment from 'moment';
import * as log from '../logging/log';
// Ensure the Whisper namespace exists before attaching views to it.
window.Whisper = window.Whisper || {};
const { Whisper } = window;

// Backbone view that records a voice message via WebAudioRecorder.
// Emits 'send' (user clicked finish), 'confirm' (recording interrupted by
// window blur or the time limit), and 'closed' (view torn down).
Whisper.RecorderView = Whisper.View.extend({
  className: 'recorder clearfix',
  template: () => $('#recorder').html(),

  // Sets up the elapsed-time ticker plus window-blur and Escape handlers,
  // then starts recording immediately.
  initialize() {
    this.startTime = Date.now();
    this.interval = setInterval(this.updateTime.bind(this), 1000);

    // Keep the bound function references so the exact same handlers can be
    // detached again in remove().
    this.onSwitchAwayBound = this.onSwitchAway.bind(this);
    $(window).on('blur', this.onSwitchAwayBound);

    this.handleKeyDownBound = this.handleKeyDown.bind(this);
    this.$el.on('keydown', this.handleKeyDownBound);

    this.start();
  },
  events: {
    'click .close': 'remove',
    'click .finish': 'finish',
    close: 'remove',
  },
  // Window lost focus: finish the recording. handleBlob will see that
  // clickedFinish is false and emit 'confirm' with lostFocus set.
  onSwitchAway() {
    this.lostFocus = true;
    this.recorder.finishRecording();
  },
  // Escape cancels the recording and closes the view.
  handleKeyDown(event: KeyboardEvent) {
    if (event.key === 'Escape') {
      this.remove();
      event.preventDefault();
      event.stopPropagation();
    }
  },
  // Render the elapsed recording time as M:SS, once per second.
  updateTime() {
    const duration = moment.duration(Date.now() - this.startTime, 'ms');
    const minutes = `${Math.trunc(duration.asMinutes())}`;
    let seconds = `${duration.seconds()}`;
    if (seconds.length < 2) {
      seconds = `0${seconds}`;
    }
    this.$('.time').text(`${minutes}:${seconds}`);
  },
  // Tears down the recorder, timer, audio graph, and event listeners, then
  // removes the view and fires 'closed'.
  async remove() {
    // Note: the 'close' event can be triggered by InboxView, when the user clicks
    // anywhere outside the recording pane.
    if (this.recorder.isRecording()) {
      this.recorder.cancelRecording();
    }

    // Reach in and terminate the web worker used by WebAudioRecorder, otherwise
    // it gets leaked due to a reference cycle with its onmessage listener
    this.recorder.worker.terminate();
    this.recorder = null;

    if (this.interval) {
      clearInterval(this.interval);
    }
    this.interval = null;

    if (this.source) {
      this.source.disconnect();
    }
    this.source = null;

    if (this.context) {
      await this.context.close();
      log.info('audio context closed');
    }
    this.context = null;

    Whisper.View.prototype.remove.call(this);
    this.trigger('closed');

    $(window).off('blur', this.onSwitchAwayBound);
    this.$el.off('keydown', this.handleKeyDownBound);
  },
  // User clicked the finish button; handleBlob will emit 'send'.
  finish() {
    this.clickedFinish = true;
    this.recorder.finishRecording();
  },
  // WebAudioRecorder onComplete callback: route the encoded blob to either
  // 'send' (user confirmed) or 'confirm' (interrupted), then tear down.
  handleBlob(_: unknown, blob: Blob) {
    if (blob && this.clickedFinish) {
      this.trigger('send', blob);
    } else if (blob) {
      this.trigger('confirm', blob, this.lostFocus);
    }
    this.remove();
  },
  // Builds the audio graph (microphone -> gain node -> WebAudioRecorder,
  // encoding to mp3) and starts recording. Any getUserMedia failure is
  // routed to onError.
  async start() {
    this.lostFocus = false;
    this.clickedFinish = false;
    this.context = new AudioContext();
    this.input = this.context.createGain();
    this.recorder = new window.WebAudioRecorder(this.input, {
      encoding: 'mp3',
      workerDir: 'js/', // must end with slash
    });
    this.recorder.onComplete = this.handleBlob.bind(this);
    this.recorder.onError = this.onError.bind(this);
    this.recorder.onTimeout = this.onTimeout.bind(this);

    try {
      const stream = await navigator.mediaDevices.getUserMedia({ audio: true });
      this.source = this.context.createMediaStreamSource(stream);
      this.source.connect(this.input);
      this.recorder.startRecording();
    } catch (err) {
      this.onError(err);
    }
  },
  // Maximum recording length reached: finish; handleBlob then emits
  // 'confirm' so the user can decide whether to send anyway.
  onTimeout() {
    this.recorder.finishRecording();
  },
  onError(error: Error) {
    // Protect against out-of-band errors, which can happen if the user revokes media
    // permissions after successfully accessing the microphone.
    if (!this.recorder) {
      return;
    }
    this.remove();

    if (error && error.name === 'NotAllowedError') {
      log.warn('RecorderView.onError: Microphone access is not allowed!');
      window.showPermissionsPopup();
    } else {
      log.error(
        'RecorderView.onError:',
        error && error.stack ? error.stack : error
      );
    }
  },
});

26
ts/window.d.ts vendored
View file

@ -135,6 +135,29 @@ type ConfirmationDialogViewProps = {
resolve: () => void;
};
/**
 * Ambient typing for the WebAudioRecorder library, which is loaded globally
 * (see `window.WebAudioRecorder` below). It records audio from a Web Audio
 * GainNode and delivers the encoded result as a Blob via `onComplete`.
 */
export declare class WebAudioRecorderClass {
  constructor(
    node: GainNode,
    options: {
      encoding: string;
      // Directory the encoder worker scripts are loaded from; callers pass a
      // trailing slash (e.g. 'js/').
      workerDir: string;
      options?: { timeLimit?: number };
    }
  );

  // Callbacks
  // Fired with the recorder instance and the encoded audio Blob.
  onComplete?: (recorder: WebAudioRecorderClass, blob: Blob) => unknown;
  onError?: (recorder: WebAudioRecorderClass, error: Error) => unknown;
  // Fired when the configured timeLimit elapses.
  onTimeout?: () => unknown;

  // Class properties
  startRecording: () => unknown;
  finishRecording: () => unknown;
  isRecording: () => boolean;
  cancelRecording: () => unknown;
  // Encoder web worker; callers may terminate it explicitly to avoid a leak
  // from its onmessage reference cycle.
  worker: Worker;
}
declare global {
// We want to extend `window`'s properties, so we need an interface.
// eslint-disable-next-line no-restricted-syntax
@ -142,7 +165,6 @@ declare global {
startApp: () => void;
QRCode: any;
WebAudioRecorder: any;
closeDebugLog: () => unknown;
removeSetupMenuItems: () => unknown;
showPermissionsPopup: () => unknown;
@ -164,6 +186,7 @@ declare global {
render: (template: string, data: any, partials?: any) => string;
parse: (template: string) => void;
};
WebAudioRecorder: typeof WebAudioRecorderClass;
WhatIsThis: WhatIsThis;
@ -595,7 +618,6 @@ export type WhisperType = {
InstallView: typeof AnyViewClass;
KeyVerificationPanelView: typeof AnyViewClass;
ReactWrapperView: typeof BasicReactWrapperViewClass;
RecorderView: typeof AnyViewClass;
SafetyNumberChangeDialogView: typeof AnyViewClass;
StandaloneRegistrationView: typeof AnyViewClass;
View: typeof AnyViewClass;