Voice notes drafts

parent 356fb301e1
commit 99015d7b96

48 changed files with 2113 additions and 909 deletions

@@ -51,6 +51,14 @@
     "message": "Adding $contact$...",
     "description": "Shown in toast while a user is being added to a group"
   },
+  "icu:RecordingComposer__cancel": {
+    "messageformat": "Cancel",
+    "description": "Label of cancel button on voice note recording UI"
+  },
+  "icu:RecordingComposer__send": {
+    "messageformat": "Send",
+    "description": "Label of send button on voice note recording UI"
+  },
   "GroupListItem__message-default": {
     "message": "$count$ members",
     "description": "Shown below the group name when selecting a group to invite a contact to"

stylesheets/components/CompositionRecording.scss (new file, 42 lines)
@@ -0,0 +1,42 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

.CompositionRecording {
  display: flex;
  align-items: center;
  gap: 12px;
  padding: 10px 18px;

  &__wave {
    display: flex;
    align-items: center;
    gap: 8px;
    flex: 1;
    border-radius: 16px;
    height: 32px;
    padding: 6px 12px;

    @include light-theme {
      background: $color-gray-05;
    }
    @include dark-theme {
      background: $color-gray-75;
    }
  }

  &__microphone {
    display: inline-block;
    height: 20px;
    width: 20px;
    @include color-svg(
      '../images/icons/v2/mic-solid-24.svg',
      $color-accent-red
    );
    animation: pulse 2s infinite;
  }

  &__timer {
    min-width: 40px;
    text-align: right;
  }
}

stylesheets/components/CompositionRecordingDraft.scss (new file, 17 lines)
@@ -0,0 +1,17 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

.CompositionRecordingDraft {
  display: flex;
  align-items: center;
  gap: 12px;
  padding: 10px 18px;

  &__sizer {
    // ignore the content size
    // size based on the parent
    flex: 1;
    flex-basis: 0;
    overflow: hidden;
  }
}

@@ -9,6 +9,12 @@ $audio-attachment-button-margin-small: 4px;
   display: flex;
   flex-direction: column;
   margin-top: 2px;
+
+  .PlaybackButton {
+    @media (min-width: 0px) and (max-width: 799px) {
+      margin-right: $audio-attachment-button-margin-small;
+    }
+  }
 }
 
 .module-message__audio-attachment__button-and-waveform {
@@ -81,125 +87,6 @@ $audio-attachment-button-margin-small: 4px;
   }
 }
-
-.module-message__audio-attachment__playback-rate-button {
-  @include button-reset;
-  @include font-body-2-bold;
-
-  width: 38px;
-  height: 18px;
-  text-align: center;
-  font-weight: 700;
-  border-radius: 4px;
-  font-size: 11px;
-  padding: 1px 2px;
-  margin: -2px 0;
-  line-height: 16px;
-  letter-spacing: 0.05px;
-  user-select: none;
-
-  .module-message__audio-attachment--incoming & {
-    @include light-theme {
-      color: $color-gray-60;
-      background: $color-black-alpha-08;
-    }
-    @include dark-theme {
-      color: $color-gray-25;
-      background: $color-white-alpha-08;
-    }
-  }
-  .module-message__audio-attachment--outgoing & {
-    color: $color-white-alpha-80;
-    background: $color-white-alpha-20;
-  }
-
-  &::after {
-    content: '';
-    display: inline-block;
-    width: 8px;
-    height: 8px;
-    margin-left: 2px;
-
-    @mixin x-icon($color) {
-      @include color-svg('../images/icons/v2/x-8.svg', $color, false);
-    }
-
-    .module-message__audio-attachment--incoming & {
-      @include light-theme {
-        @include x-icon($color-gray-60);
-      }
-      @include dark-theme {
-        @include x-icon($color-gray-25);
-      }
-    }
-    .module-message__audio-attachment--outgoing & {
-      @include x-icon($color-white-alpha-80);
-    }
-  }
-}
-
-.module-message__audio-attachment__play-button,
-.module-message__audio-attachment__spinner {
-  @include button-reset;
-
-  flex-shrink: 0;
-  width: $audio-attachment-button-size;
-  height: $audio-attachment-button-size;
-  margin-right: $audio-attachment-button-margin-big;
-
-  outline: none;
-  border-radius: 18px;
-
-  @media (min-width: 0px) and (max-width: 799px) {
-    margin-right: $audio-attachment-button-margin-small;
-  }
-
-  &::before {
-    display: block;
-    height: 100%;
-    content: '';
-  }
-
-  @mixin audio-icon($name, $icon, $color) {
-    &--#{$name}::before {
-      @include color-svg('../images/icons/v2/#{$icon}.svg', $color, false);
-    }
-  }
-
-  @mixin all-audio-icons($color) {
-    @include audio-icon(play, play-solid-20, $color);
-    @include audio-icon(pause, pause-solid-20, $color);
-    @include audio-icon(download, arrow-down-20, $color);
-    @include audio-icon(pending, audio-spinner-arc-22, $color);
-  }
-
-  &--pending {
-    cursor: auto;
-  }
-
-  &--pending::before {
-    animation: rotate 1000ms linear infinite;
-  }
-
-  .module-message__audio-attachment--incoming & {
-    @include light-theme {
-      background: $color-white;
-
-      @include all-audio-icons($color-gray-60);
-    }
-
-    @include dark-theme {
-      background: $color-gray-60;
-
-      @include all-audio-icons($color-gray-15);
-    }
-  }
-
-  .module-message__audio-attachment--outgoing & {
-    background: $color-white-alpha-20;
-    @include all-audio-icons($color-white);
-  }
-}
-
 .module-message__audio-attachment__waveform {
   flex-shrink: 0;
@@ -210,9 +97,8 @@ $audio-attachment-button-margin-small: 4px;
   outline: 0;
 }
 
-.module-message__audio-attachment__play-button,
-.module-message__audio-attachment__playback-rate-button,
-.module-message__audio-attachment__spinner,
+.PlaybackButton,
+.PlaybackRateButton,
 .module-message__audio-attachment__waveform {
   &:focus {
     @include keyboard-mode {
@@ -229,44 +115,6 @@ $audio-attachment-button-margin-small: 4px;
   }
 }
 
-.module-message__audio-attachment__waveform__bar {
-  display: inline-block;
-
-  width: 2px;
-  border-radius: 2px;
-  transition: height 250ms, background 250ms;
-
-  &:not(:first-of-type) {
-    margin-left: 2px;
-  }
-
-  .module-message__audio-attachment--incoming & {
-    @include light-theme {
-      background: $color-black-alpha-40;
-
-      &--active {
-        background: $color-black-alpha-80;
-      }
-    }
-
-    @include dark-theme {
-      background: $color-white-alpha-40;
-
-      &--active {
-        background: $color-white-alpha-70;
-      }
-    }
-  }
-
-  .module-message__audio-attachment--outgoing & {
-    background: $color-white-alpha-40;
-
-    &--active {
-      background: $color-white-alpha-80;
-    }
-  }
-}
-
 .module-message__audio-attachment__metadata {
   display: flex;
   flex-direction: row;

stylesheets/components/PlaybackButton.scss (new file, 84 lines)
@@ -0,0 +1,84 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

.PlaybackButton {
  @include button-reset;

  flex-shrink: 0;
  margin-right: $audio-attachment-button-margin-big;

  outline: none;
  border-radius: 18px;

  &::before {
    display: block;
    height: 100%;
    content: '';
  }

  @mixin audio-icon($name, $icon, $color) {
    &.PlaybackButton--#{$name}::before {
      @include color-svg('../images/icons/v2/#{$icon}.svg', $color, false);
    }
  }

  @mixin all-audio-icons($color) {
    @include audio-icon(play, play-solid-20, $color);
    @include audio-icon(pause, pause-solid-20, $color);
    @include audio-icon(download, arrow-down-20, $color);
    @include audio-icon(pending, audio-spinner-arc-22, $color);
  }

  &--variant-message {
    width: $audio-attachment-button-size;
    height: $audio-attachment-button-size;
  }

  &--variant-mini {
    &::before {
      -webkit-mask-size: 100% !important;
    }
    width: 14px;
    height: 14px;
  }
  &--variant-draft {
    &::before {
      -webkit-mask-size: 100% !important;
    }
    width: 18px;
    height: 18px;
  }

  &--pending {
    cursor: auto;
  }

  &--pending::before {
    animation: rotate 1000ms linear infinite;
  }

  @include light-theme {
    &--context-incoming {
      &.PlaybackButton--variant-message {
        background: $color-white;
      }
    }
    @include all-audio-icons($color-gray-60);
  }

  @include dark-theme {
    &--context-incoming {
      &.PlaybackButton--variant-message {
        background: $color-gray-60;
      }
    }
    @include all-audio-icons($color-gray-15);
  }

  &--context-outgoing {
    &.PlaybackButton--variant-message {
      background: $color-white-alpha-20;
    }
    @include all-audio-icons($color-white);
  }
}

stylesheets/components/RecordingComposer.scss (new file, 34 lines)
@@ -0,0 +1,34 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

.RecordingComposer {
  display: flex;
  align-items: center;
  gap: 12px;
  padding: 10px 18px;

  &__content {
    display: flex;
    align-items: center;
    gap: 8px;
    flex: 1;
    border-radius: 16px;
    height: 32px;
    padding: 6px 12px;
    overflow: hidden;

    @include light-theme {
      background: $color-gray-05;
    }
    @include dark-theme {
      background: $color-gray-75;
    }
  }

  &__button {
    font-size: 13px;
    min-width: 76px;
    line-height: 18px;
    padding: 5px 16px;
  }
}

stylesheets/components/Waveform.scss (new file, 62 lines)
@@ -0,0 +1,62 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

.Waveform {
  flex-shrink: 0;

  display: flex;
  align-items: center;
  cursor: pointer;

  outline: 0;

  &__bar {
    display: inline-block;

    width: 2px;
    border-radius: 2px;
    transition: height 250ms, background 250ms;

    &:not(:first-of-type) {
      margin-left: 2px;
    }

    @include light-theme {
      background: $color-black-alpha-40;

      &--active {
        background: $color-black-alpha-80;
      }
    }

    @include dark-theme {
      background: $color-white-alpha-40;

      &--active {
        background: $color-white-alpha-80;
      }
    }

    .module-message__audio-attachment--incoming & {
      @include light-theme {
        &--active {
          background: $color-black-alpha-80;
        }
      }

      @include dark-theme {
        &--active {
          background: $color-white-alpha-70;
        }
      }
    }

    .module-message__audio-attachment--outgoing & {
      background: $color-white-alpha-40;

      &--active {
        background: $color-white-alpha-80;
      }
    }
  }
}

stylesheets/components/WaveformScrubber.scss (new file, 8 lines)
@@ -0,0 +1,8 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

.WaveformScrubber {
  display: flex;
  flex: 1;
  flex-basis: 0;
}

@@ -51,6 +51,8 @@
 @import './components/Checkbox.scss';
 @import './components/CircleCheckbox.scss';
 @import './components/CompositionArea.scss';
+@import './components/CompositionRecording.scss';
+@import './components/CompositionRecordingDraft.scss';
 @import './components/CompositionInput.scss';
 @import './components/CompositionTextArea.scss';
 @import './components/ContactModal.scss';
@@ -100,11 +102,13 @@
 @import './components/MyStories.scss';
 @import './components/OutgoingGiftBadgeModal.scss';
 @import './components/PermissionsPopup.scss';
+@import './components/PlaybackButton.scss';
 @import './components/PlaybackRateButton.scss';
 @import './components/Preferences.scss';
 @import './components/ProfileEditor.scss';
 @import './components/Quote.scss';
 @import './components/ReactionPickerPicker.scss';
+@import './components/RecordingComposer.scss';
 @import './components/SafetyNumberChangeDialog.scss';
 @import './components/SafetyNumberViewer.scss';
 @import './components/ScrollDownButton.scss';
@@ -136,5 +140,7 @@
 @import './components/TimelineWarnings.scss';
 @import './components/TitleBarContainer.scss';
 @import './components/Toast.scss';
+@import './components/Waveform.scss';
+@import './components/WaveformScrubber.scss';
 @import './components/UsernameOnboardingModalBody.scss';
 @import './components/WhatsNew.scss';

@@ -126,6 +126,8 @@ const useProps = (overrideProps: Partial<Props> = {}): Props => ({
   // SMS-only
   isSMSOnly: overrideProps.isSMSOnly || false,
   isFetchingUUID: overrideProps.isFetchingUUID || false,
+  renderSmartCompositionRecording: _ => <div>RECORDING</div>,
+  renderSmartCompositionRecordingDraft: _ => <div>RECORDING DRAFT</div>,
 });
 
 export function Default(): JSX.Element {

@@ -37,7 +37,7 @@ import type {
   AttachmentDraftType,
   InMemoryAttachmentDraftType,
 } from '../types/Attachment';
-import { isImageAttachment } from '../types/Attachment';
+import { isImageAttachment, isVoiceMessage } from '../types/Attachment';
 import { AudioCapture } from './conversation/AudioCapture';
 import { CompositionUpload } from './CompositionUpload';
 import type {
@@ -62,7 +62,9 @@ import { isImageTypeSupported } from '../util/GoogleChrome';
 import * as KeyboardLayout from '../services/keyboardLayout';
 import { usePrevious } from '../hooks/usePrevious';
 import { PanelType } from '../types/Panels';
+import type { SmartCompositionRecordingDraftProps } from '../state/smart/CompositionRecordingDraft';
 import { useEscapeHandling } from '../hooks/useEscapeHandling';
+import type { SmartCompositionRecordingProps } from '../state/smart/CompositionRecording';
 
 export type OwnProps = Readonly<{
   acceptedMessageRequest?: boolean;
@@ -77,7 +79,7 @@ export type OwnProps = Readonly<{
   cancelRecording: () => unknown;
   completeRecording: (
     conversationId: string,
-    onSendAudioRecording?: (rec: InMemoryAttachmentDraftType) => unknown
+    onRecordingComplete: (rec: InMemoryAttachmentDraftType) => unknown
   ) => unknown;
   conversationId: string;
   uuid?: string;
@@ -138,6 +140,12 @@ export type OwnProps = Readonly<{
   showConversation: ShowConversationType;
   startRecording: (id: string) => unknown;
   theme: ThemeType;
+  renderSmartCompositionRecording: (
+    props: SmartCompositionRecordingProps
+  ) => JSX.Element;
+  renderSmartCompositionRecordingDraft: (
+    props: SmartCompositionRecordingDraftProps
+  ) => JSX.Element | null;
 }>;
 
 export type Props = Pick<
@@ -196,10 +204,6 @@ export function CompositionArea({
   draftAttachments,
   onClearAttachments,
   // AudioCapture
-  cancelRecording,
-  completeRecording,
-  errorDialogAudioRecorderType,
-  errorRecording,
   recordingState,
   startRecording,
   // StagedLinkPreview
@@ -266,7 +270,9 @@
   // SMS-only contacts
   isSMSOnly,
   isFetchingUUID,
-}: Props): JSX.Element {
+  renderSmartCompositionRecording,
+  renderSmartCompositionRecordingDraft,
+}: Props): JSX.Element | null {
   const [dirty, setDirty] = useState(false);
   const [large, setLarge] = useState(false);
   const [attachmentToEdit, setAttachmentToEdit] = useState<
@@ -418,20 +424,9 @@
   const micButtonFragment = shouldShowMicrophone ? (
     <div className="CompositionArea__button-cell">
       <AudioCapture
-        cancelRecording={cancelRecording}
-        completeRecording={completeRecording}
         conversationId={conversationId}
         draftAttachments={draftAttachments}
-        errorDialogAudioRecorderType={errorDialogAudioRecorderType}
-        errorRecording={errorRecording}
         i18n={i18n}
-        recordingState={recordingState}
-        onSendAudioRecording={(
-          voiceNoteAttachment: InMemoryAttachmentDraftType
-        ) => {
-          emojiButtonRef.current?.close();
-          sendMultiMediaMessage(conversationId, { voiceNoteAttachment });
-        }}
         startRecording={startRecording}
       />
     </div>
@@ -517,6 +512,10 @@
     };
   }, [setLarge]);
 
+  const handleRecordingBeforeSend = useCallback(() => {
+    emojiButtonRef.current?.close();
+  }, [emojiButtonRef]);
+
   const clearQuote = useCallback(() => {
     if (quotedMessageId) {
       setQuoteByMessageId(conversationId, undefined);
@@ -633,6 +632,20 @@
     );
   }
 
+  if (isRecording) {
+    return renderSmartCompositionRecording({
+      onBeforeSend: handleRecordingBeforeSend,
+    });
+  }
+
+  if (draftAttachments.length === 1 && isVoiceMessage(draftAttachments[0])) {
+    const voiceNoteAttachment = draftAttachments[0];
+
+    if (!voiceNoteAttachment.pending && voiceNoteAttachment.url) {
+      return renderSmartCompositionRecordingDraft({ voiceNoteAttachment });
+    }
+  }
+
   return (
     <div className="CompositionArea">
       {attachmentToEdit &&
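
For orientation, a minimal sketch of how a parent could supply the new render props. The prop shapes (the onBeforeSend callback and the voice-note attachment guard) come from the hunks above; the SmartCompositionRecording component shown here is only an assumed stand-in for the real state-connected component, which is not part of this hunk.

import React from 'react';

// Assumed shape, taken from the renderSmartCompositionRecording call above.
type SmartCompositionRecordingProps = { onBeforeSend: () => unknown };

// Hypothetical stand-in; the real smart component lives under ts/state/smart/.
function SmartCompositionRecording({
  onBeforeSend,
}: SmartCompositionRecordingProps): JSX.Element {
  void onBeforeSend; // the real component invokes this right before sending
  return <div>recording UI</div>;
}

// A parent can then pass CompositionArea a render prop like:
const renderSmartCompositionRecording = (
  props: SmartCompositionRecordingProps
): JSX.Element => <SmartCompositionRecording {...props} />;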

ts/components/CompositionRecording.stories.tsx (new file, 57 lines)
@@ -0,0 +1,57 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import React, { useState } from 'react';
import { action } from '@storybook/addon-actions';
import { CompositionRecording } from './CompositionRecording';

import { setupI18n } from '../util/setupI18n';
import enMessages from '../../_locales/en/messages.json';

const i18n = setupI18n('en', enMessages);

export default {
  title: 'components/CompositionRecording',
  component: CompositionRecording,
};

export function Default(): JSX.Element {
  const [active, setActive] = useState(false);

  const cancel = action('cancel');
  const send = action('send');

  const handleActivate = () => {
    setActive(true);
  };

  const handleCancel = () => {
    cancel();
    setActive(false);
  };
  const handleSend = () => {
    send();
    setActive(false);
  };

  return (
    <>
      {!active && (
        <button type="button" onClick={handleActivate}>
          Activate
        </button>
      )}
      {active && (
        <CompositionRecording
          i18n={i18n}
          conversationId="convo-id"
          onCancel={handleCancel}
          onSend={handleSend}
          errorRecording={_ => action('error')()}
          addAttachment={action('addAttachment')}
          completeRecording={action('completeRecording')}
        />
      )}
    </>
  );
}

ts/components/CompositionRecording.tsx (new file, 156 lines)
@@ -0,0 +1,156 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import { noop } from 'lodash';
import React, { useCallback, useEffect, useRef, useState } from 'react';
import { useEscapeHandling } from '../hooks/useEscapeHandling';
import { usePrevious } from '../hooks/usePrevious';
import type { InMemoryAttachmentDraftType } from '../types/Attachment';
import { ErrorDialogAudioRecorderType } from '../types/AudioRecorder';
import type { LocalizerType } from '../types/Util';
import { DurationInSeconds, SECOND } from '../util/durations';
import { durationToPlaybackText } from '../util/durationToPlaybackText';
import { ConfirmationDialog } from './ConfirmationDialog';
import { RecordingComposer } from './RecordingComposer';
import { ToastVoiceNoteLimit } from './ToastVoiceNoteLimit';

export type Props = {
  i18n: LocalizerType;
  conversationId: string;
  onCancel: () => void;
  onSend: () => void;
  errorRecording: (e: ErrorDialogAudioRecorderType) => unknown;
  errorDialogAudioRecorderType?: ErrorDialogAudioRecorderType;
  addAttachment: (
    conversationId: string,
    attachment: InMemoryAttachmentDraftType
  ) => unknown;
  completeRecording: (
    conversationId: string,
    onRecordingComplete: (rec: InMemoryAttachmentDraftType) => unknown
  ) => unknown;
};

export function CompositionRecording({
  i18n,
  conversationId,
  onCancel,
  onSend,
  errorRecording,
  errorDialogAudioRecorderType,
  addAttachment,
  completeRecording,
}: Props): JSX.Element {
  useEscapeHandling(onCancel);

  const [showVoiceNoteLimitToast, setShowVoiceNoteLimitToast] = useState(true);

  // when interrupted (blur, switching convos)
  // stop recording and save draft
  const handleRecordingInterruption = useCallback(() => {
    completeRecording(conversationId, attachment => {
      addAttachment(conversationId, attachment);
    });
  }, [conversationId, completeRecording, addAttachment]);

  // switched to another app
  useEffect(() => {
    window.addEventListener('blur', handleRecordingInterruption);
    return () => {
      window.removeEventListener('blur', handleRecordingInterruption);
    };
  }, [handleRecordingInterruption]);

  // switched conversations
  const previousConversationId = usePrevious(conversationId, conversationId);
  useEffect(() => {
    if (previousConversationId !== conversationId) {
      handleRecordingInterruption();
    }
  });

  const handleCloseToast = useCallback(() => {
    setShowVoiceNoteLimitToast(false);
  }, []);

  useEffect(() => {
    return () => {
      handleCloseToast();
    };
  }, [handleCloseToast]);

  const startTime = useRef(Date.now());
  const [duration, setDuration] = useState(0);
  const drift = useRef(0);

  // update recording duration
  useEffect(() => {
    const timeoutId = setTimeout(() => {
      const now = Date.now();
      const newDurationMs = now - startTime.current;
      drift.current = newDurationMs % SECOND;
      setDuration(newDurationMs / SECOND);

      if (
        DurationInSeconds.fromMillis(newDurationMs) >= DurationInSeconds.HOUR
      ) {
        errorRecording(ErrorDialogAudioRecorderType.Timeout);
      }
    }, SECOND - drift.current);

    return () => {
      clearTimeout(timeoutId);
    };
  }, [duration, errorRecording]);

  let confirmationDialog: JSX.Element | undefined;
  if (errorDialogAudioRecorderType === ErrorDialogAudioRecorderType.Timeout) {
    confirmationDialog = (
      <ConfirmationDialog
        dialogName="AudioCapture.sendAnyway"
        i18n={i18n}
        onCancel={onCancel}
        onClose={noop}
        cancelText={i18n('discard')}
        actions={[
          {
            text: i18n('sendAnyway'),
            style: 'affirmative',
            action: onSend,
          },
        ]}
      >
        {i18n('voiceRecordingInterruptedMax')}
      </ConfirmationDialog>
    );
  } else if (
    errorDialogAudioRecorderType === ErrorDialogAudioRecorderType.ErrorRecording
  ) {
    confirmationDialog = (
      <ConfirmationDialog
        dialogName="AudioCapture.error"
        i18n={i18n}
        onCancel={onCancel}
        onClose={noop}
        cancelText={i18n('ok')}
        actions={[]}
      >
        {i18n('voiceNoteError')}
      </ConfirmationDialog>
    );
  }

  return (
    <RecordingComposer i18n={i18n} onCancel={onCancel} onSend={onSend}>
      <div className="CompositionRecording__microphone" />
      <div className="CompositionRecording__timer">
        {durationToPlaybackText(duration)}
      </div>

      {confirmationDialog}
      {showVoiceNoteLimitToast && (
        <ToastVoiceNoteLimit i18n={i18n} onClose={handleCloseToast} />
      )}
    </RecordingComposer>
  );
}
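
The duration effect above re-arms a one-shot timeout roughly every second and subtracts the measured drift so the displayed timer stays aligned with wall-clock time instead of slowly falling behind. A minimal standalone sketch of the same idea outside React; the startTicker and onTick names are illustrative only.

// Drift-compensated one-second ticker, mirroring the effect above.
// Each tick measures how far past a whole second we woke up and
// shortens the next delay by that overshoot.
const SECOND = 1000;

function startTicker(onTick: (elapsedSeconds: number) => void): () => void {
  const startTime = Date.now();
  let timeoutId: ReturnType<typeof setTimeout> | undefined;

  const schedule = (drift: number) => {
    timeoutId = setTimeout(() => {
      const elapsedMs = Date.now() - startTime;
      onTick(elapsedMs / SECOND);
      schedule(elapsedMs % SECOND); // carry the overshoot into the next delay
    }, SECOND - drift);
  };

  schedule(0);
  return () => clearTimeout(timeoutId);
}

// Usage: const stop = startTicker(s => console.log(`${Math.floor(s)}s elapsed`));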

ts/components/CompositionRecordingDraft.stories.tsx (new file, 85 lines)
@@ -0,0 +1,85 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import React, { useState } from 'react';
import { action } from '@storybook/addon-actions';
import { CompositionRecordingDraft } from './CompositionRecordingDraft';

import { setupI18n } from '../util/setupI18n';
import enMessages from '../../_locales/en/messages.json';

const i18n = setupI18n('en', enMessages);

export default {
  title: 'components/CompositionRecordingDraft',
  component: CompositionRecordingDraft,
};

export function Default(): JSX.Element {
  const [isPlaying, setIsPlaying] = useState(false);
  const [currentTime, setCurrentTime] = React.useState<number>(0);
  const [duration, setDuration] = React.useState<number | undefined>(undefined);

  const audio = React.useMemo(() => {
    const a = new Audio();

    a.addEventListener('loadedmetadata', () => {
      setDuration(duration);
    });

    a.src = '/fixtures/incompetech-com-Agnus-Dei-X.mp3';

    a.addEventListener('timeupdate', () => {
      setCurrentTime(a.currentTime);
    });

    a.addEventListener('ended', () => {
      setIsPlaying(false);
      setCurrentTime(0);
    });

    a.addEventListener('loadeddata', () => {
      a.currentTime = currentTime;
    });

    return a;
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, []);

  const handlePlay = (positionAsRatio?: number) => {
    if (positionAsRatio !== undefined) {
      audio.currentTime = positionAsRatio * audio.duration;
    }
    void audio.play();
    setCurrentTime(audio.currentTime);
    setIsPlaying(true);
  };

  const handlePause = () => {
    audio.pause();
    setIsPlaying(false);
  };

  const handleScrub = (newPosition: number) => {
    if (duration !== undefined) {
      audio.currentTime = newPosition * duration;
    }
  };

  return (
    <CompositionRecordingDraft
      i18n={i18n}
      audioUrl={audio.src}
      active={{
        playing: isPlaying,
        currentTime,
        duration,
      }}
      onCancel={action('cancel')}
      onSend={action('send')}
      onPlay={handlePlay}
      onPause={handlePause}
      onScrub={handleScrub}
    />
  );
}

ts/components/CompositionRecordingDraft.tsx (new file, 162 lines)
@@ -0,0 +1,162 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import React, { useState, useCallback, useRef } from 'react';
import type { ContentRect } from 'react-measure';
import Measure from 'react-measure';
import { useComputePeaks } from '../hooks/useComputePeaks';
import type { LocalizerType } from '../types/Util';
import { WaveformScrubber } from './conversation/WaveformScrubber';
import { PlaybackButton } from './PlaybackButton';
import { RecordingComposer } from './RecordingComposer';
import * as log from '../logging/log';

type Props = {
  i18n: LocalizerType;
  audioUrl: string | undefined;
  active:
    | {
        playing: boolean;
        duration: number | undefined;
        currentTime: number;
      }
    | undefined;
  onCancel: () => void;
  onSend: () => void;
  onPlay: (positionAsRatio?: number) => void;
  onPause: () => void;
  onScrub: (positionAsRatio: number) => void;
};

export function CompositionRecordingDraft({
  i18n,
  audioUrl,
  active,
  onCancel,
  onSend,
  onPlay,
  onPause,
  onScrub,
}: Props): JSX.Element {
  const [state, setState] = useState<{
    calculatingWidth: boolean;
    width: undefined | number;
  }>({ calculatingWidth: false, width: undefined });

  const timeout = useRef<undefined | NodeJS.Timeout>(undefined);

  const handleResize = useCallback(
    ({ bounds }: ContentRect) => {
      if (!bounds || bounds.width === state.width) {
        return;
      }

      if (!state.calculatingWidth) {
        setState({ ...state, calculatingWidth: true });
      }

      if (timeout.current) {
        clearTimeout(timeout.current);
      }

      const newWidth = bounds.width;

      // if mounting, set width immediately
      // otherwise debounce
      if (state.width === undefined) {
        setState({ calculatingWidth: false, width: newWidth });
      } else {
        timeout.current = setTimeout(() => {
          setState({ calculatingWidth: false, width: newWidth });
        }, 500);
      }
    },
    [state]
  );

  const handlePlaybackClick = useCallback(() => {
    if (active?.playing) {
      onPause();
    } else {
      onPlay();
    }
  }, [active, onPause, onPlay]);

  const scrubber = (
    <SizedWaveformScrubber
      i18n={i18n}
      audioUrl={audioUrl}
      activeDuration={active?.duration}
      currentTime={active?.currentTime ?? 0}
      width={state.width}
      onClick={onScrub}
      onScrub={onScrub}
    />
  );

  return (
    <RecordingComposer i18n={i18n} onCancel={onCancel} onSend={onSend}>
      <PlaybackButton
        variant="draft"
        mod={active?.playing ? 'pause' : 'play'}
        label={
          active?.playing
            ? i18n('MessageAudio--pause')
            : i18n('MessageAudio--play')
        }
        onClick={handlePlaybackClick}
      />
      <Measure bounds onResize={handleResize}>
        {({ measureRef }) => (
          <div ref={measureRef} className="CompositionRecordingDraft__sizer">
            {scrubber}
          </div>
        )}
      </Measure>
    </RecordingComposer>
  );
}

type SizedWaveformScrubberProps = {
  i18n: LocalizerType;
  audioUrl: string | undefined;
  // undefined if we don't have a size yet
  width: number | undefined;
  // defined if we are playing
  activeDuration: number | undefined;
  currentTime: number;
  onScrub: (progressAsRatio: number) => void;
  onClick: (progressAsRatio: number) => void;
};
function SizedWaveformScrubber({
  i18n,
  audioUrl,
  activeDuration,
  currentTime,
  onClick,
  onScrub,
  width,
}: SizedWaveformScrubberProps) {
  const handleCorrupted = () => {
    log.warn('SizedWaveformScrubber: audio corrupted');
  };
  const { peaks, duration } = useComputePeaks({
    audioUrl,
    activeDuration,
    onCorrupted: handleCorrupted,
    barCount: Math.floor((width ?? 800) / 4),
  });

  return (
    <WaveformScrubber
      i18n={i18n}
      peaks={peaks}
      currentTime={currentTime}
      barMinHeight={2}
      barMaxHeight={20}
      duration={duration}
      onClick={onClick}
      onScrub={onScrub}
    />
  );
}
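
The resize handler above debounces width changes so the waveform peaks are only recomputed once the layout settles, except on mount, where the first measured width is applied immediately. A minimal sketch of that debounce pattern on its own; createDebouncedWidth and apply are illustrative names, not part of the commit.

// Debounced width setter: the first value is applied immediately,
// later values wait until 500ms pass without another update.
function createDebouncedWidth(apply: (width: number) => void) {
  let current: number | undefined;
  let timer: ReturnType<typeof setTimeout> | undefined;

  return (next: number) => {
    if (current === next) {
      return; // nothing changed
    }
    if (timer) {
      clearTimeout(timer);
    }
    if (current === undefined) {
      current = next;
      apply(next); // mounting: no debounce
    } else {
      timer = setTimeout(() => {
        current = next;
        apply(next);
      }, 500);
    }
  };
}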

@@ -17,7 +17,7 @@ export default {
   component: MiniPlayer,
 };
 
-export function Basic(): JSX.Element {
+export function Default(): JSX.Element {
   const [active, setActive] = useState(false);
 
   const [playerState, setPlayerState] = useState(PlayerState.loading);
@@ -1,11 +1,11 @@
 // Copyright 2022 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 
-import classNames from 'classnames';
 import React, { useCallback } from 'react';
 import type { LocalizerType } from '../types/Util';
 import { durationToPlaybackText } from '../util/durationToPlaybackText';
 import { Emojify } from './conversation/Emojify';
+import { PlaybackButton } from './PlaybackButton';
 import { PlaybackRateButton } from './PlaybackRateButton';
 
 export enum PlayerState {
@@ -60,15 +60,19 @@ export function MiniPlayer({
   }, [state, onPause, onPlay]);
 
   let label: string | undefined;
+  let mod: 'play' | 'pause' | 'pending';
   switch (state) {
     case PlayerState.playing:
       label = i18n('MessageAudio--pause');
+      mod = 'pause';
       break;
     case PlayerState.paused:
       label = i18n('MessageAudio--play');
+      mod = 'play';
       break;
     case PlayerState.loading:
       label = i18n('MessageAudio--pending');
+      mod = 'pending';
       break;
     default:
       throw new TypeError(`Missing case ${state}`);
@@ -76,17 +80,12 @@
 
   return (
     <div className="MiniPlayer">
-      <button
-        type="button"
-        className={classNames(
-          'MiniPlayer__playback-button',
-          state === 'playing' && 'MiniPlayer__playback-button--pause',
-          state === 'paused' && 'MiniPlayer__playback-button--play',
-          state === 'loading' && 'MiniPlayer__playback-button--pending'
-        )}
+      <PlaybackButton
+        context="incoming"
+        variant="mini"
+        mod={mod}
+        label={label}
         onClick={handleClick}
-        aria-label={label}
-        disabled={state === PlayerState.loading}
       />
 
       <div className="MiniPlayer__state">

ts/components/PlaybackButton.stories.tsx (new file, 48 lines)
@@ -0,0 +1,48 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import React from 'react';
import type { CSSProperties } from 'react';
import { action } from '@storybook/addon-actions';
import { PlaybackButton } from './PlaybackButton';

export default {
  title: 'components/PlaybackButton',
  component: PlaybackButton,
};

const rowStyles: CSSProperties = {
  display: 'flex',
  flexDirection: 'row',
  padding: 10,
};

export function Default(): JSX.Element {
  return (
    <>
      {(['message', 'draft', 'mini'] as const).map(variant => (
        <>
          {(['incoming', 'outgoing'] as const).map(context => (
            <div
              style={{
                ...rowStyles,
                background: context === 'outgoing' ? '#2c6bed' : undefined,
              }}
            >
              {(['play', 'download', 'pending', 'pause'] as const).map(mod => (
                <PlaybackButton
                  key={`${variant}_${context}_${mod}`}
                  variant={variant}
                  label="playback"
                  onClick={action('click')}
                  context={context}
                  mod={mod}
                />
              ))}
            </div>
          ))}
        </>
      ))}
    </>
  );
}

ts/components/PlaybackButton.tsx (new file, 86 lines)
@@ -0,0 +1,86 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import { animated, useSpring } from '@react-spring/web';
import classNames from 'classnames';
import React, { useCallback } from 'react';

const SPRING_CONFIG = {
  mass: 0.5,
  tension: 350,
  friction: 20,
  velocity: 0.01,
};

type ButtonProps = {
  context?: 'incoming' | 'outgoing';
  variant: 'message' | 'mini' | 'draft';
  mod: 'play' | 'pause' | 'download' | 'pending';
  label: string;
  visible?: boolean;
  onClick: () => void;
  onMouseDown?: () => void;
  onMouseUp?: () => void;
};

/** Handles animations, key events, and stopping event propagation */
export const PlaybackButton = React.forwardRef<HTMLButtonElement, ButtonProps>(
  function ButtonInner(props, ref) {
    const { mod, label, variant, onClick, context, visible = true } = props;
    const [animProps] = useSpring(
      {
        config: SPRING_CONFIG,
        to: { scale: visible ? 1 : 0 },
      },
      [visible]
    );

    // Clicking button toggle playback
    const onButtonClick = useCallback(
      (event: React.MouseEvent) => {
        event.stopPropagation();
        event.preventDefault();

        onClick();
      },
      [onClick]
    );

    // Keyboard playback toggle
    const onButtonKeyDown = useCallback(
      (event: React.KeyboardEvent) => {
        if (event.key !== 'Enter' && event.key !== 'Space') {
          return;
        }
        event.stopPropagation();
        event.preventDefault();

        onClick();
      },
      [onClick]
    );

    const buttonComponent = (
      <button
        type="button"
        ref={ref}
        className={classNames(
          'PlaybackButton',
          `PlaybackButton--variant-${variant}`,
          context && `PlaybackButton--context-${context}`,
          mod ? `PlaybackButton--${mod}` : undefined
        )}
        onClick={onButtonClick}
        onKeyDown={onButtonKeyDown}
        tabIndex={0}
        aria-label={label}
      />
    );

    if (variant === 'message') {
      return <animated.div style={animProps}>{buttonComponent}</animated.div>;
    }

    return buttonComponent;
  }
);
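
The classNames call above is what ties this component to the modifier selectors in the new PlaybackButton.scss: one class per axis (variant, context, mod). A small illustrative check; the expected string follows directly from the call above for one combination of props.

import classNames from 'classnames';

// For variant="draft", context="incoming", mod="play" the button renders with:
const className = classNames(
  'PlaybackButton',
  'PlaybackButton--variant-draft',
  'PlaybackButton--context-incoming',
  'PlaybackButton--play'
);
// => 'PlaybackButton PlaybackButton--variant-draft PlaybackButton--context-incoming PlaybackButton--play'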

ts/components/RecordingComposer.tsx (new file, 42 lines)
@@ -0,0 +1,42 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import React from 'react';
import type { ReactNode } from 'react';
import type { LocalizerType } from '../types/I18N';
import { Button, ButtonSize, ButtonVariant } from './Button';

type Props = {
  i18n: LocalizerType;
  children: ReactNode;
  onCancel: () => void;
  onSend: () => void;
};

export function RecordingComposer({
  i18n,
  onCancel,
  onSend,
  children,
}: Props): JSX.Element {
  return (
    <div className="RecordingComposer">
      <div className="RecordingComposer__content">{children}</div>
      <Button
        className="RecordingComposer__button"
        onClick={onCancel}
        size={ButtonSize.Medium}
        variant={ButtonVariant.Secondary}
      >
        {i18n('icu:RecordingComposer__cancel')}
      </Button>
      <Button
        className="RecordingComposer__button"
        onClick={onSend}
        size={ButtonSize.Medium}
      >
        {i18n('icu:RecordingComposer__send')}
      </Button>
    </div>
  );
}

@@ -81,7 +81,8 @@ async function doComputePeaks(
   url: string,
   barCount: number
 ): Promise<ComputePeaksResult> {
-  const existing = waveformCache.get(url);
+  const cacheKey = `${url}:${barCount}`;
+  const existing = waveformCache.get(cacheKey);
   if (existing) {
     log.info('GlobalAudioContext: waveform cache hit', url);
     return Promise.resolve(existing);
@@ -101,7 +102,7 @@ async function doComputePeaks(
       `GlobalAudioContext: audio ${url} duration ${duration}s is too long`
     );
     const emptyResult = { peaks, duration };
-    waveformCache.set(url, emptyResult);
+    waveformCache.set(cacheKey, emptyResult);
     return emptyResult;
   }
 
@@ -143,7 +144,7 @@ async function doComputePeaks(
   }
 
   const result = { peaks, duration };
-  waveformCache.set(url, result);
+  waveformCache.set(cacheKey, result);
   return result;
 }
 
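
Because the same audio can now be rendered at different bar counts (message bubble vs. draft scrubber), the peaks cache is keyed on both the URL and the bar count, so one resolution no longer overwrites another. A minimal sketch of that composite-key memoization; the computePeaks callback here is an illustrative stand-in for the real decoder, not the actual GlobalAudioContext API.

// Composite-key memo cache: the same URL at different bar counts
// produces distinct entries instead of clobbering each other.
type PeaksResult = { peaks: ReadonlyArray<number>; duration: number };

const cache = new Map<string, PeaksResult>();

async function getPeaks(
  url: string,
  barCount: number,
  computePeaks: (url: string, barCount: number) => Promise<PeaksResult>
): Promise<PeaksResult> {
  const cacheKey = `${url}:${barCount}`;
  const existing = cache.get(cacheKey);
  if (existing) {
    return existing; // cache hit for this URL at this resolution
  }
  const result = await computePeaks(url, barCount);
  cache.set(cacheKey, result);
  return result;
}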

(deleted file, 85 lines)
@@ -1,85 +0,0 @@
// Copyright 2020 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import * as React from 'react';

import { action } from '@storybook/addon-actions';
import { select } from '@storybook/addon-knobs';

import {
  ErrorDialogAudioRecorderType,
  RecordingState,
} from '../../types/AudioRecorder';
import type { PropsType } from './AudioCapture';
import { AudioCapture } from './AudioCapture';
import { setupI18n } from '../../util/setupI18n';
import enMessages from '../../../_locales/en/messages.json';

const i18n = setupI18n('en', enMessages);

export default {
  title: 'Components/Conversation/AudioCapture',
};

const createProps = (overrideProps: Partial<PropsType> = {}): PropsType => ({
  cancelRecording: action('cancelRecording'),
  completeRecording: action('completeRecording'),
  conversationId: '123',
  draftAttachments: [],
  errorDialogAudioRecorderType: overrideProps.errorDialogAudioRecorderType,
  errorRecording: action('errorRecording'),
  i18n,
  recordingState: select(
    'recordingState',
    RecordingState,
    overrideProps.recordingState || RecordingState.Idle
  ),
  onSendAudioRecording: action('onSendAudioRecording'),
  startRecording: action('startRecording'),
});

export function Default(): JSX.Element {
  return <AudioCapture {...createProps()} />;
}

export const _Initializing = (): JSX.Element => {
  return (
    <AudioCapture
      {...createProps({
        recordingState: RecordingState.Initializing,
      })}
    />
  );
};

export const _Recording = (): JSX.Element => {
  return (
    <AudioCapture
      {...createProps({
        recordingState: RecordingState.Recording,
      })}
    />
  );
};

export function VoiceLimit(): JSX.Element {
  return (
    <AudioCapture
      {...createProps({
        errorDialogAudioRecorderType: ErrorDialogAudioRecorderType.Timeout,
        recordingState: RecordingState.Recording,
      })}
    />
  );
}

export function SwitchedApps(): JSX.Element {
  return (
    <AudioCapture
      {...createProps({
        errorDialogAudioRecorderType: ErrorDialogAudioRecorderType.Blur,
        recordingState: RecordingState.Recording,
      })}
    />
  );
}
|
@@ -1,100 +1,30 @@
 // Copyright 2016 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only

-import React, { useCallback, useEffect, useState } from 'react';
+import React, { useCallback, useState } from 'react';
-import * as moment from 'moment';
-import { noop } from 'lodash';

-import type {
-  AttachmentDraftType,
-  InMemoryAttachmentDraftType,
-} from '../../types/Attachment';
+import type { AttachmentDraftType } from '../../types/Attachment';
-import { ConfirmationDialog } from '../ConfirmationDialog';
 import type { LocalizerType } from '../../types/Util';
-import { ToastVoiceNoteLimit } from '../ToastVoiceNoteLimit';
 import { ToastVoiceNoteMustBeOnlyAttachment } from '../ToastVoiceNoteMustBeOnlyAttachment';
-import { useEscapeHandling } from '../../hooks/useEscapeHandling';
 import {
   useStartRecordingShortcut,
   useKeyboardShortcuts,
 } from '../../hooks/useKeyboardShortcuts';
-import {
-  ErrorDialogAudioRecorderType,
-  RecordingState,
-} from '../../types/AudioRecorder';

-type OnSendAudioRecordingType = (rec: InMemoryAttachmentDraftType) => unknown;

 export type PropsType = {
-  cancelRecording: () => unknown;
   conversationId: string;
-  completeRecording: (
-    conversationId: string,
-    onSendAudioRecording?: OnSendAudioRecordingType
-  ) => unknown;
   draftAttachments: ReadonlyArray<AttachmentDraftType>;
-  errorDialogAudioRecorderType?: ErrorDialogAudioRecorderType;
-  errorRecording: (e: ErrorDialogAudioRecorderType) => unknown;
   i18n: LocalizerType;
-  recordingState: RecordingState;
-  onSendAudioRecording: OnSendAudioRecordingType;
   startRecording: (id: string) => unknown;
 };

-enum ToastType {
-  VoiceNoteLimit,
-  VoiceNoteMustBeOnlyAttachment,
-}
-
-const START_DURATION_TEXT = '0:00';

 export function AudioCapture({
-  cancelRecording,
-  completeRecording,
   conversationId,
   draftAttachments,
-  errorDialogAudioRecorderType,
-  errorRecording,
   i18n,
-  recordingState,
-  onSendAudioRecording,
   startRecording,
 }: PropsType): JSX.Element {
-  const [durationText, setDurationText] = useState<string>(START_DURATION_TEXT);
-  const [toastType, setToastType] = useState<ToastType | undefined>();
+  const [showOnlyAttachmentToast, setShowOnlyAttachmentToast] = useState(false);

-  // Cancel recording if we switch away from this conversation, unmounting
-  useEffect(() => {
-    return () => {
-      cancelRecording();
-    };
-  }, [cancelRecording]);
-
-  // Stop recording and show confirmation if user switches away from this app
-  useEffect(() => {
-    if (recordingState !== RecordingState.Recording) {
-      return;
-    }
-
-    const handler = () => {
-      errorRecording(ErrorDialogAudioRecorderType.Blur);
-    };
-    window.addEventListener('blur', handler);
-
-    return () => {
-      window.removeEventListener('blur', handler);
-    };
-  }, [recordingState, completeRecording, errorRecording]);
-
-  const escapeRecording = useCallback(() => {
-    if (recordingState !== RecordingState.Recording) {
-      return;
-    }
-
-    cancelRecording();
-  }, [cancelRecording, recordingState]);
-
-  useEscapeHandling(escapeRecording);
-
   const recordConversation = useCallback(
     () => startRecording(conversationId),

@@ -103,156 +33,40 @@ export function AudioCapture({
   const startRecordingShortcut = useStartRecordingShortcut(recordConversation);
   useKeyboardShortcuts(startRecordingShortcut);

-  const closeToast = useCallback(() => {
-    setToastType(undefined);
+  const handleCloseToast = useCallback(() => {
+    setShowOnlyAttachmentToast(false);
   }, []);

-  // Update timestamp regularly, then timeout if recording goes over five minutes
-  useEffect(() => {
-    if (recordingState !== RecordingState.Recording) {
-      return;
-    }
-
-    setDurationText(START_DURATION_TEXT);
-    setToastType(ToastType.VoiceNoteLimit);
-
-    const startTime = Date.now();
-    const interval = setInterval(() => {
-      const duration = moment.duration(Date.now() - startTime, 'ms');
-      const minutes = `${Math.trunc(duration.asMinutes())}`;
-      let seconds = `${duration.seconds()}`;
-      if (seconds.length < 2) {
-        seconds = `0${seconds}`;
-      }
-      setDurationText(`${minutes}:${seconds}`);
-
-      if (duration >= moment.duration(1, 'hours')) {
-        errorRecording(ErrorDialogAudioRecorderType.Timeout);
-      }
-    }, 1000);
-
-    return () => {
-      clearInterval(interval);
-      closeToast();
-    };
+  const handleClick = useCallback(() => {
+    if (draftAttachments.length) {
+      setShowOnlyAttachmentToast(true);
+    } else {
+      startRecording(conversationId);
+    }
   }, [
-    closeToast,
-    completeRecording,
-    errorRecording,
-    recordingState,
-    setDurationText,
+    conversationId,
+    draftAttachments,
+    setShowOnlyAttachmentToast,
+    startRecording,
   ]);

-  const clickCancel = useCallback(() => {
-    cancelRecording();
-  }, [cancelRecording]);
-
-  const clickSend = useCallback(() => {
-    completeRecording(conversationId, onSendAudioRecording);
-  }, [conversationId, completeRecording, onSendAudioRecording]);
-
-  let toastElement: JSX.Element | undefined;
-  if (toastType === ToastType.VoiceNoteLimit) {
-    toastElement = <ToastVoiceNoteLimit i18n={i18n} onClose={closeToast} />;
-  } else if (toastType === ToastType.VoiceNoteMustBeOnlyAttachment) {
-    toastElement = (
-      <ToastVoiceNoteMustBeOnlyAttachment i18n={i18n} onClose={closeToast} />
-    );
-  }
-
-  let confirmationDialog: JSX.Element | undefined;
-  if (
-    errorDialogAudioRecorderType === ErrorDialogAudioRecorderType.Blur ||
-    errorDialogAudioRecorderType === ErrorDialogAudioRecorderType.Timeout
-  ) {
-    const confirmationDialogText =
-      errorDialogAudioRecorderType === ErrorDialogAudioRecorderType.Blur
-        ? i18n('voiceRecordingInterruptedBlur')
-        : i18n('voiceRecordingInterruptedMax');
-
-    confirmationDialog = (
-      <ConfirmationDialog
-        dialogName="AudioCapture.sendAnyway"
-        i18n={i18n}
-        onCancel={clickCancel}
-        onClose={noop}
-        cancelText={i18n('discard')}
-        actions={[
-          {
-            text: i18n('sendAnyway'),
-            style: 'affirmative',
-            action: clickSend,
-          },
-        ]}
-      >
-        {confirmationDialogText}
-      </ConfirmationDialog>
-    );
-  } else if (
-    errorDialogAudioRecorderType === ErrorDialogAudioRecorderType.ErrorRecording
-  ) {
-    confirmationDialog = (
-      <ConfirmationDialog
-        dialogName="AudioCapture.error"
-        i18n={i18n}
-        onCancel={clickCancel}
-        onClose={noop}
-        cancelText={i18n('ok')}
-        actions={[]}
-      >
-        {i18n('voiceNoteError')}
-      </ConfirmationDialog>
-    );
-  }
-
-  if (recordingState === RecordingState.Recording && !confirmationDialog) {
-    return (
-      <>
-        <div className="AudioCapture">
-          <button
-            className="AudioCapture__recorder-button AudioCapture__recorder-button--complete"
-            onClick={clickSend}
-            tabIndex={0}
-            title={i18n('voiceRecording--complete')}
-            type="button"
-          >
-            <span className="icon" />
-          </button>
-          <span className="AudioCapture__time">{durationText}</span>
-          <button
-            className="AudioCapture__recorder-button AudioCapture__recorder-button--cancel"
-            onClick={clickCancel}
-            tabIndex={0}
-            title={i18n('voiceRecording--cancel')}
-            type="button"
-          >
-            <span className="icon" />
-          </button>
-        </div>
-        {toastElement}
-      </>
-    );
-  }
-
   return (
     <>
       <div className="AudioCapture">
         <button
           aria-label={i18n('voiceRecording--start')}
           className="AudioCapture__microphone"
-          onClick={() => {
-            if (draftAttachments.length) {
-              setToastType(ToastType.VoiceNoteMustBeOnlyAttachment);
-            } else {
-              startRecording(conversationId);
-            }
-          }}
+          onClick={handleClick}
           title={i18n('voiceRecording--start')}
           type="button"
         />
-        {confirmationDialog}
       </div>
-      {toastElement}
+      {showOnlyAttachmentToast && (
+        <ToastVoiceNoteMustBeOnlyAttachment
+          i18n={i18n}
+          onClose={handleCloseToast}
+        />
+      )}
     </>
   );
 }
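
Reading note (not part of the diff): a minimal sketch, with invented names, of the decision the new handleClick makes above. If anything is already attached, a voice note cannot be started, so the component only raises the "must be only attachment" toast; otherwise it asks the recorder to start.

// Illustrative only: mirrors the branch in the new handleClick.
// `DraftAttachment` is a stand-in for AttachmentDraftType.
type DraftAttachment = { path?: string };

type MicClickOutcome =
  | { kind: 'show-only-attachment-toast' }
  | { kind: 'start-recording'; conversationId: string };

function decideMicClick(
  conversationId: string,
  draftAttachments: ReadonlyArray<DraftAttachment>
): MicClickOutcome {
  // A voice note must be the only attachment, so bail out with a toast
  // whenever anything is already attached.
  if (draftAttachments.length > 0) {
    return { kind: 'show-only-attachment-toast' };
  }
  return { kind: 'start-recording', conversationId };
}

// decideMicClick('abc', [])                    -> start-recording
// decideMicClick('abc', [{ path: 'img.png' }]) -> show-only-attachment-toast
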
@@ -1,7 +1,7 @@
 // Copyright 2021 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only

-import React, { useCallback, useRef, useEffect, useState } from 'react';
+import React, { useCallback } from 'react';
 import type { RefObject } from 'react';
 import classNames from 'classnames';
 import { noop } from 'lodash';

@@ -18,6 +18,9 @@ import { MessageMetadata } from './MessageMetadata';
 import * as log from '../../logging/log';
 import type { ActiveAudioPlayerStateType } from '../../state/ducks/audioPlayer';
 import { PlaybackRateButton } from '../PlaybackRateButton';
+import { PlaybackButton } from '../PlaybackButton';
+import { WaveformScrubber } from './WaveformScrubber';
+import { useComputePeaks } from '../../hooks/useComputePeaks';
 import { durationToPlaybackText } from '../../util/durationToPlaybackText';

 export type OwnProps = Readonly<{

@@ -58,15 +61,6 @@ export type DispatchProps = Readonly<{

 export type Props = OwnProps & DispatchProps;

-type ButtonProps = {
-  mod?: string;
-  label: string;
-  visible?: boolean;
-  onClick: () => void;
-  onMouseDown?: () => void;
-  onMouseUp?: () => void;
-};
-
 enum State {
   NotDownloaded = 'NotDownloaded',
   Pending = 'Pending',

@@ -82,12 +76,6 @@ const BAR_NOT_DOWNLOADED_HEIGHT = 2;
 const BAR_MIN_HEIGHT = 4;
 const BAR_MAX_HEIGHT = 20;

-const REWIND_BAR_COUNT = 2;
-
-// Increments for keyboard audio seek (in seconds)
-const SMALL_INCREMENT = 1;
-const BIG_INCREMENT = 5;
-
 const SPRING_CONFIG = {
   mass: 0.5,
   tension: 350,

@@ -97,62 +85,6 @@ const SPRING_CONFIG = {

 const DOT_DIV_WIDTH = 14;

-/** Handles animations, key events, and stopping event propagation */
-const PlaybackButton = React.forwardRef<HTMLButtonElement, ButtonProps>(
-  function ButtonInner(props, ref) {
-    const { mod, label, onClick, visible = true } = props;
-    const [animProps] = useSpring(
-      {
-        config: SPRING_CONFIG,
-        to: { scale: visible ? 1 : 0 },
-      },
-      [visible]
-    );
-
-    // Clicking button toggle playback
-    const onButtonClick = useCallback(
-      (event: React.MouseEvent) => {
-        event.stopPropagation();
-        event.preventDefault();
-
-        onClick();
-      },
-      [onClick]
-    );
-
-    // Keyboard playback toggle
-    const onButtonKeyDown = useCallback(
-      (event: React.KeyboardEvent) => {
-        if (event.key !== 'Enter' && event.key !== 'Space') {
-          return;
-        }
-        event.stopPropagation();
-        event.preventDefault();
-
-        onClick();
-      },
-      [onClick]
-    );
-
-    return (
-      <animated.div style={animProps}>
-        <button
-          type="button"
-          ref={ref}
-          className={classNames(
-            `${CSS_BASE}__play-button`,
-            mod ? `${CSS_BASE}__play-button--${mod}` : undefined
-          )}
-          onClick={onButtonClick}
-          onKeyDown={onButtonKeyDown}
-          tabIndex={0}
-          aria-label={label}
-        />
-      </animated.div>
-    );
-  }
-);
-
 function PlayedDot({
   played,
   onHide,

@@ -222,7 +154,6 @@ export function MessageAudio(props: Props): JSX.Element {
     kickOffAttachmentDownload,
     onCorrupted,
-    computePeaks,
     setPlaybackRate,
     onPlayMessage,
     pushPanelForConversation,

@@ -230,21 +161,18 @@
     setIsPlaying,
   } = props;

-  const waveformRef = useRef<HTMLDivElement | null>(null);
-
   const isPlaying = active?.playing ?? false;

   const [isPlayedDotVisible, setIsPlayedDotVisible] = React.useState(!played);

-  // if it's playing, use the duration passed as props as it might
-  // change during loading/playback (?)
-  // NOTE: Avoid division by zero
-  const [duration, setDuration] = useState(active?.duration ?? 1e-23);
-
-  const [hasPeaks, setHasPeaks] = useState(false);
-  const [peaks, setPeaks] = useState<ReadonlyArray<number>>(
-    new Array(BAR_COUNT).fill(0)
-  );
+  const audioUrl = isDownloaded(attachment) ? attachment.url : undefined;
+
+  const { duration, hasPeaks, peaks } = useComputePeaks({
+    audioUrl,
+    activeDuration: active?.duration,
+    barCount: BAR_COUNT,
+    onCorrupted,
+  });

   let state: State;

@@ -258,60 +186,7 @@ export function MessageAudio(props: Props): JSX.Element {
     state = State.Normal;
   }

-  // This effect loads audio file and computes its RMS peak for displaying the
-  // waveform.
-  useEffect(() => {
-    if (state !== State.Computing) {
-      return noop;
-    }
-
-    log.info('MessageAudio: loading audio and computing waveform');
-
-    let canceled = false;
-
-    void (async () => {
-      try {
-        if (!attachment.url) {
-          throw new Error(
-            'Expected attachment url in the MessageAudio with ' +
-              `state: ${state}`
-          );
-        }
-
-        const { peaks: newPeaks, duration: newDuration } = await computePeaks(
-          attachment.url,
-          BAR_COUNT
-        );
-        if (canceled) {
-          return;
-        }
-        setPeaks(newPeaks);
-        setHasPeaks(true);
-        setDuration(Math.max(newDuration, 1e-23));
-      } catch (err) {
-        log.error(
-          'MessageAudio: computePeaks error, marking as corrupted',
-          err
-        );
-
-        onCorrupted();
-      }
-    })();
-
-    return () => {
-      canceled = true;
-    };
-  }, [
-    attachment,
-    computePeaks,
-    setDuration,
-    setPeaks,
-    setHasPeaks,
-    onCorrupted,
-    state,
-  ]);
-
-  const toggleIsPlaying = () => {
+  const toggleIsPlaying = useCallback(() => {
     if (!isPlaying) {
       if (!attachment.url) {
         throw new Error(

@@ -328,144 +203,96 @@ export function MessageAudio(props: Props): JSX.Element {
     } else {
       setIsPlaying(false);
     }
-  };
-
-  // Clicking waveform moves playback head position and starts playback.
-  const onWaveformClick = (event: React.MouseEvent) => {
-    event.preventDefault();
-    event.stopPropagation();
-
-    if (state !== State.Normal) {
-      return;
-    }
-    if (!waveformRef.current) {
-      return;
-    }
-
-    const boundingRect = waveformRef.current.getBoundingClientRect();
-    let progress = (event.pageX - boundingRect.left) / boundingRect.width;
-
-    if (progress <= REWIND_BAR_COUNT / BAR_COUNT) {
-      progress = 0;
-    }
-
-    if (active) {
-      setPosition(progress);
-      if (!active.playing) {
-        setIsPlaying(true);
-      }
-      return;
-    }
-
-    if (attachment.url) {
-      onPlayMessage(id, progress);
-    } else {
-      log.warn('Waveform clicked on attachment with no url');
-    }
-  };
-
-  // Keyboard navigation for waveform. Pressing keys moves playback head
-  // forward/backwards.
-  const onWaveformKeyDown = (event: React.KeyboardEvent) => {
-    let increment: number;
-    if (event.key === 'ArrowRight' || event.key === 'ArrowUp') {
-      increment = +SMALL_INCREMENT;
-    } else if (event.key === 'ArrowLeft' || event.key === 'ArrowDown') {
-      increment = -SMALL_INCREMENT;
-    } else if (event.key === 'PageUp') {
-      increment = +BIG_INCREMENT;
-    } else if (event.key === 'PageDown') {
-      increment = -BIG_INCREMENT;
-    } else {
-      // We don't handle other keys
-      return;
-    }
-
-    event.preventDefault();
-    event.stopPropagation();
-
-    // There is no audio to rewind
-    if (!active) {
-      return;
-    }
-
-    const currentPosition = active.currentTime / duration;
-    const positionIncrement = increment / duration;
-
-    setPosition(currentPosition + positionIncrement);
-
-    if (!isPlaying) {
-      toggleIsPlaying();
-    }
-  };
+  }, [
+    isPlaying,
+    attachment.url,
+    active,
+    state,
+    setIsPlaying,
+    id,
+    onPlayMessage,
+  ]);

   const currentTimeOrZero = active?.currentTime ?? 0;

-  const peakPosition = peaks.length * (currentTimeOrZero / duration);
+  const updatePosition = useCallback(
+    (newPosition: number) => {
+      if (active) {
+        setPosition(newPosition);
+        if (!active.playing) {
+          setIsPlaying(true);
+        }
+        return;
+      }
+
+      if (attachment.url) {
+        onPlayMessage(id, newPosition);
+      } else {
+        log.warn('Waveform clicked on attachment with no url');
+      }
+    },
+    [active, attachment.url, id, onPlayMessage, setIsPlaying, setPosition]
+  );
+
+  const handleWaveformClick = useCallback(
+    (positionAsRatio: number) => {
+      if (state !== State.Normal) {
+        return;
+      }
+
+      updatePosition(positionAsRatio);
+    },
+    [state, updatePosition]
+  );
+
+  const handleWaveformScrub = useCallback(
+    (amountInSeconds: number) => {
+      const currentPosition = currentTimeOrZero / duration;
+      const positionIncrement = amountInSeconds / duration;
+
+      updatePosition(
+        Math.min(Math.max(0, currentPosition + positionIncrement), duration)
+      );
+    },
+    [currentTimeOrZero, duration, updatePosition]
+  );

   const waveform = (
-    <div
-      ref={waveformRef}
-      className={`${CSS_BASE}__waveform`}
-      onClick={onWaveformClick}
-      onKeyDown={onWaveformKeyDown}
-      tabIndex={0}
-      role="slider"
-      aria-label={i18n('MessageAudio--slider')}
-      aria-orientation="horizontal"
-      aria-valuenow={currentTimeOrZero}
-      aria-valuemin={0}
-      aria-valuemax={duration}
-      aria-valuetext={durationToPlaybackText(currentTimeOrZero)}
-    >
-      {peaks.map((peak, i) => {
-        let height = Math.max(BAR_MIN_HEIGHT, BAR_MAX_HEIGHT * peak);
-        if (state !== State.Normal) {
-          height = BAR_NOT_DOWNLOADED_HEIGHT;
-        }
-
-        const highlight = i < peakPosition;
-
-        // Use maximum height for current audio position
-        if (highlight && i + 1 >= peakPosition) {
-          height = BAR_MAX_HEIGHT;
-        }
-
-        const key = i;
-
-        return (
-          <div
-            className={classNames([
-              `${CSS_BASE}__waveform__bar`,
-              highlight ? `${CSS_BASE}__waveform__bar--active` : null,
-            ])}
-            key={key}
-            style={{ height }}
-          />
-        );
-      })}
-    </div>
+    <WaveformScrubber
+      i18n={i18n}
+      peaks={peaks}
+      duration={duration}
+      currentTime={currentTimeOrZero}
+      barMinHeight={
+        state !== State.Normal ? BAR_NOT_DOWNLOADED_HEIGHT : BAR_MIN_HEIGHT
+      }
+      barMaxHeight={BAR_MAX_HEIGHT}
+      onClick={handleWaveformClick}
+      onScrub={handleWaveformScrub}
+    />
   );

   let button: React.ReactElement;
   if (state === State.Pending || state === State.Computing) {
     // Not really a button, but who cares?
     button = (
-      <div
-        className={classNames(
-          `${CSS_BASE}__spinner`,
-          `${CSS_BASE}__spinner--pending`
-        )}
-        title={i18n('MessageAudio--pending')}
+      <PlaybackButton
+        variant="message"
+        mod="pending"
+        onClick={noop}
+        label={i18n('MessageAudio--pending')}
+        context={direction}
       />
     );
   } else if (state === State.NotDownloaded) {
     button = (
       <PlaybackButton
         ref={buttonRef}
+        variant="message"
         mod="download"
-        label="MessageAudio--download"
+        label={i18n('MessageAudio--download')}
         onClick={kickOffAttachmentDownload}
+        context={direction}
       />
     );
   } else {

@@ -473,11 +300,13 @@ export function MessageAudio(props: Props): JSX.Element {
     button = (
       <PlaybackButton
         ref={buttonRef}
+        variant="message"
         mod={isPlaying ? 'pause' : 'play'}
         label={
           isPlaying ? i18n('MessageAudio--pause') : i18n('MessageAudio--play')
         }
         onClick={toggleIsPlaying}
+        context={direction}
       />
     );
   }
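
Reading note (not part of the diff): the arithmetic inside the new handleWaveformScrub, pulled out as a standalone sketch. Positions are ratios of the total duration; the diff clamps against duration itself, while this sketch clamps to 1 to keep the ratio explicit.

// Illustrative sketch of the scrub arithmetic used above.
// `amountInSeconds` may be negative (scrubbing backwards).
function scrubbedPosition(
  currentTimeInSeconds: number,
  durationInSeconds: number,
  amountInSeconds: number
): number {
  const currentPosition = currentTimeInSeconds / durationInSeconds;
  const positionIncrement = amountInSeconds / durationInSeconds;
  // Clamp to a sensible ratio range for the sketch.
  return Math.min(Math.max(0, currentPosition + positionIncrement), 1);
}

// scrubbedPosition(10, 60, 5)  === 0.25
// scrubbedPosition(2, 60, -5)  === 0
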
@@ -2,7 +2,7 @@
 // SPDX-License-Identifier: AGPL-3.0-only

 import * as React from 'react';
-import { isBoolean } from 'lodash';
+import { isBoolean, noop } from 'lodash';

 import { action } from '@storybook/addon-actions';
 import { boolean, number, select, text } from '@storybook/addon-knobs';

@@ -134,12 +134,14 @@ function MessageAudioContainer({
   const [isActive, setIsActive] = React.useState<boolean>(false);
   const [currentTime, setCurrentTime] = React.useState<number>(0);
   const [playbackRate, setPlaybackRate] = React.useState<number>(1);
-  const [playing, setPlaying] = React.useState<boolean>(false);
+  const [isPlaying, setIsPlaying] = React.useState<boolean>(false);
   const [_played, setPlayed] = React.useState<boolean>(played);

-  const audio = React.useMemo(() => {
+  const audioPlayer = React.useMemo(() => {
     const a = new Audio();

+    let onLoadedData: () => void = noop;
+
     a.addEventListener('timeupdate', () => {
       setCurrentTime(a.currentTime);
     });

@@ -148,54 +150,76 @@ function MessageAudioContainer({
       setIsActive(false);
     });

-    a.addEventListener('loadeddata', () => {
-      a.currentTime = currentTime;
-    });
+    a.addEventListener('loadeddata', () => onLoadedData());

-    return a;
-    // eslint-disable-next-line react-hooks/exhaustive-deps
+    function play(positionAsRatio?: number) {
+      if (positionAsRatio !== undefined) {
+        a.currentTime = positionAsRatio * a.duration;
+      }
+      void a.play();
+    }
+
+    return {
+      loadAndPlay(url: string, positionAsRatio: number) {
+        onLoadedData = () => {
+          play(positionAsRatio);
+        };
+        a.src = url;
+      },
+      play,
+      pause() {
+        a.pause();
+      },
+      set playbackRate(rate: number) {
+        a.playbackRate = rate;
+      },
+      set currentTime(value: number) {
+        a.currentTime = value;
+      },
+      get duration() {
+        return a.duration;
+      },
+    };
   }, []);

-  const handlePlayMessage = (id: string, position: number) => {
+  const handlePlayMessage = (id: string, positionAsRatio: number) => {
     if (!active) {
-      audio.src = messageIdToAudioUrl[id as keyof typeof messageIdToAudioUrl];
+      audioPlayer.loadAndPlay(
+        messageIdToAudioUrl[id as keyof typeof messageIdToAudioUrl],
+        positionAsRatio
+      );
       setIsActive(true);
-    }
-    if (!playing) {
-      void audio.play();
-      setPlaying(true);
+      setIsPlaying(true);
       setPlayed(true);
     }
-
-    if (!Number.isNaN(audio.duration)) {
-      audio.currentTime = audio.duration * position;
-    }
-    if (!Number.isNaN(audio.currentTime)) {
-      setCurrentTime(audio.currentTime);
-    }
   };

   const setPlaybackRateAction = (rate: number) => {
-    audio.playbackRate = rate;
+    audioPlayer.playbackRate = rate;
     setPlaybackRate(rate);
   };

   const setIsPlayingAction = (value: boolean) => {
     if (value) {
-      void audio.play();
+      audioPlayer.play();
     } else {
-      audio.pause();
+      audioPlayer.pause();
     }
-    setPlaying(value);
+    setIsPlaying(value);
   };

   const setPosition = (value: number) => {
-    audio.currentTime = value * audio.duration;
-    setCurrentTime(audio.currentTime);
+    audioPlayer.currentTime = value * audioPlayer.duration;
+    setCurrentTime(audioPlayer.currentTime);
   };

   const active = isActive
-    ? { playing, playbackRate, currentTime, duration: audio.duration }
+    ? {
+        playing: isPlaying,
+        playbackRate,
+        currentTime,
+        duration: audioPlayer.duration,
+      }
     : undefined;

   return (
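
Reading note (not part of the diff): the story's audioPlayer wrapper above defers seeking and playing until 'loadeddata' fires. A stripped-down, framework-free sketch of the same idea, with a hypothetical helper name:

// Minimal sketch: wrap an HTMLAudioElement so callers can ask to
// "load this URL and start at a position" without racing 'loadeddata'.
function createSketchAudioPlayer(audio: HTMLAudioElement) {
  let onLoadedData: () => void = () => undefined;
  audio.addEventListener('loadeddata', () => onLoadedData());

  function play(positionAsRatio?: number): void {
    if (positionAsRatio !== undefined) {
      audio.currentTime = positionAsRatio * audio.duration;
    }
    void audio.play();
  }

  return {
    loadAndPlay(url: string, positionAsRatio: number): void {
      // Defer seeking/playing until the first frame of data is available.
      onLoadedData = () => play(positionAsRatio);
      audio.src = url;
    },
    play,
    pause(): void {
      audio.pause();
    },
  };
}

// const player = createSketchAudioPlayer(new Audio());
// player.loadAndPlay('https://example.com/note.mp3', 0.5);
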
ts/components/conversation/Waveform.tsx (new file, 60 lines)
@@ -0,0 +1,60 @@
+// Copyright 2023 Signal Messenger, LLC
+// SPDX-License-Identifier: AGPL-3.0-only
+
+import classNames from 'classnames';
+import React from 'react';
+import { assertDev } from '../../util/assert';
+
+type Props = {
+  peaks: ReadonlyArray<number>;
+  barMinHeight: number;
+  barMaxHeight: number;
+  currentTime: number | undefined;
+  duration: number | undefined;
+};
+
+export function Waveform({
+  peaks,
+  barMinHeight,
+  barMaxHeight,
+  currentTime,
+  duration,
+}: Props): JSX.Element {
+  const currentTimeOrZero = currentTime ?? 0;
+  const peakPosition = peaks.length * (currentTimeOrZero / (duration ?? 1e-23));
+
+  return (
+    <div className={classNames(['Waveform'])}>
+      {peaks.map((peak, i) => {
+        assertDev(
+          peak >= 0 && peak <= 1 && !Number.isNaN(peak),
+          `Peak outside of range: ${peak}`
+        );
+
+        let height = Math.max(barMinHeight, barMaxHeight * peak);
+
+        const highlight = i < peakPosition;
+
+        // Use maximum height for current audio position
+        if (highlight && i + 1 >= peakPosition) {
+          height = barMaxHeight;
+        }
+
+        assertDev(!Number.isNaN(height), 'Got NaN for peak height');
+
+        const key = i;
+
+        return (
+          <div
+            className={classNames([
+              'Waveform__bar',
+              highlight ? 'Waveform__bar--active' : null,
+            ])}
+            key={key}
+            style={{ height }}
+          />
+        );
+      })}
+    </div>
+  );
+}
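
Reading note (not part of the diff): a small sketch of how the bar-highlight math in Waveform behaves, with made-up numbers. Bars before the playback head are highlighted, the bar under the head is forced to barMaxHeight, and bars after it keep their peak-scaled height.

// Illustrative: compute the rendered height for bar `i`, mirroring Waveform.
function barHeight(
  peak: number,
  i: number,
  peakPosition: number,
  barMinHeight: number,
  barMaxHeight: number
): { height: number; highlight: boolean } {
  let height = Math.max(barMinHeight, barMaxHeight * peak);
  const highlight = i < peakPosition;
  // The bar currently under the playback head gets maximum height.
  if (highlight && i + 1 >= peakPosition) {
    height = barMaxHeight;
  }
  return { height, highlight };
}

// With peaks.length = 47, currentTime = 30s, duration = 60s:
// peakPosition = 47 * (30 / 60) = 23.5, so bar 23 is the "head" bar.
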
ts/components/conversation/WaveformScrubber.tsx (new file, 123 lines)
@@ -0,0 +1,123 @@
+// Copyright 2023 Signal Messenger, LLC
+// SPDX-License-Identifier: AGPL-3.0-only
+
+import React, { useCallback, useRef } from 'react';
+import { useRefMerger } from '../../hooks/useRefMerger';
+import type { LocalizerType } from '../../types/Util';
+import { durationToPlaybackText } from '../../util/durationToPlaybackText';
+import { Waveform } from './Waveform';
+
+type Props = Readonly<{
+  i18n: LocalizerType;
+  peaks: ReadonlyArray<number>;
+  currentTime: number;
+  duration: number | undefined;
+  barMinHeight: number;
+  barMaxHeight: number;
+  onClick: (positionAsRatio: number) => void;
+  onScrub: (positionAsRatio: number) => void;
+}>;
+
+const BAR_COUNT = 47;
+
+const REWIND_BAR_COUNT = 2;
+
+// Increments for keyboard audio seek (in seconds)
+const SMALL_INCREMENT = 1;
+const BIG_INCREMENT = 5;
+
+export const WaveformScrubber = React.forwardRef(function WaveformScrubber(
+  {
+    i18n,
+    peaks,
+    barMinHeight,
+    barMaxHeight,
+    currentTime,
+    duration,
+    onClick,
+    onScrub,
+  }: Props,
+  ref
+): JSX.Element {
+  const refMerger = useRefMerger();
+
+  const waveformRef = useRef<HTMLDivElement | null>(null);
+
+  // Clicking waveform moves playback head position and starts playback.
+  const handleClick = useCallback(
+    (event: React.MouseEvent) => {
+      event.preventDefault();
+      event.stopPropagation();
+
+      if (!waveformRef.current) {
+        return;
+      }
+
+      const boundingRect = waveformRef.current.getBoundingClientRect();
+      let progress = (event.pageX - boundingRect.left) / boundingRect.width;
+
+      if (progress <= REWIND_BAR_COUNT / BAR_COUNT) {
+        progress = 0;
+      }
+
+      onClick(progress);
+    },
+    [waveformRef, onClick]
+  );
+
+  // Keyboard navigation for waveform. Pressing keys moves playback head
+  // forward/backwards.
+  const handleKeyDown = (event: React.KeyboardEvent) => {
+    if (!duration) {
+      return;
+    }
+
+    let increment: number;
+    if (event.key === 'ArrowRight' || event.key === 'ArrowUp') {
+      increment = +SMALL_INCREMENT;
+    } else if (event.key === 'ArrowLeft' || event.key === 'ArrowDown') {
+      increment = -SMALL_INCREMENT;
+    } else if (event.key === 'PageUp') {
+      increment = +BIG_INCREMENT;
+    } else if (event.key === 'PageDown') {
+      increment = -BIG_INCREMENT;
+    } else {
+      // We don't handle other keys
+      return;
+    }
+
+    event.preventDefault();
+    event.stopPropagation();
+
+    const currentPosition = currentTime / duration;
+    const positionIncrement = increment / duration;
+    const newPosition = currentPosition + positionIncrement;
+
+    onScrub(newPosition);
+  };
+
+  return (
+    <div
+      ref={refMerger(waveformRef, ref)}
+      className="WaveformScrubber"
+      onClick={handleClick}
+      onKeyDown={handleKeyDown}
+      tabIndex={0}
+      role="slider"
+      aria-label={i18n('MessageAudio--slider')}
+      aria-orientation="horizontal"
+      aria-valuenow={currentTime}
+      aria-valuemin={0}
+      aria-valuemax={duration}
+      aria-valuetext={durationToPlaybackText(currentTime)}
+    >
+      <Waveform
+        peaks={peaks}
+        barMinHeight={barMinHeight}
+        barMaxHeight={barMaxHeight}
+        currentTime={currentTime}
+        duration={duration}
+      />
+    </div>
+  );
+});
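
Reading note (not part of the diff): the click-to-ratio snap used by handleClick in WaveformScrubber, extracted as a pure function for clarity. Clicks landing within the first REWIND_BAR_COUNT bars snap back to the start.

// Illustrative only: same math as handleClick above.
const SKETCH_BAR_COUNT = 47;
const SKETCH_REWIND_BAR_COUNT = 2;

function clickRatio(
  clickX: number,
  waveformLeft: number,
  waveformWidth: number
): number {
  let progress = (clickX - waveformLeft) / waveformWidth;
  if (progress <= SKETCH_REWIND_BAR_COUNT / SKETCH_BAR_COUNT) {
    progress = 0;
  }
  return progress;
}

// clickRatio(105, 100, 200) === 0    (inside the "rewind" zone)
// clickRatio(200, 100, 200) === 0.5
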
ts/hooks/useComputePeaks.ts (new file, 73 lines)
@@ -0,0 +1,73 @@
+// Copyright 2023 Signal Messenger, LLC
+// SPDX-License-Identifier: AGPL-3.0-only
+
+import { noop } from 'lodash';
+import { useEffect, useState } from 'react';
+import { computePeaks } from '../components/VoiceNotesPlaybackContext';
+import * as log from '../logging/log';
+
+type WaveformData = {
+  peaks: ReadonlyArray<number>;
+  duration: number;
+};
+
+export function useComputePeaks({
+  audioUrl,
+  activeDuration,
+  barCount,
+  onCorrupted,
+}: {
+  audioUrl: string | undefined;
+  activeDuration: number | undefined;
+  barCount: number;
+  onCorrupted: () => void;
+}): { peaks: ReadonlyArray<number>; hasPeaks: boolean; duration: number } {
+  const [waveformData, setWaveformData] = useState<WaveformData | undefined>(
+    undefined
+  );
+
+  // This effect loads audio file and computes its RMS peak for displaying the
+  // waveform.
+  useEffect(() => {
+    if (!audioUrl) {
+      return noop;
+    }
+
+    log.info('MessageAudio: loading audio and computing waveform');
+
+    let canceled = false;
+
+    void (async () => {
+      try {
+        const { peaks: newPeaks, duration: newDuration } = await computePeaks(
+          audioUrl,
+          barCount
+        );
+        if (canceled) {
+          return;
+        }
+        setWaveformData({
+          peaks: newPeaks,
+          duration: Math.max(newDuration, 1e-23),
+        });
+      } catch (err) {
+        log.error(
+          'MessageAudio: computePeaks error, marking as corrupted',
+          err
+        );
+
+        onCorrupted();
+      }
+    })();
+
+    return () => {
+      canceled = true;
+    };
+  }, [audioUrl, barCount, onCorrupted]);
+
+  return {
+    duration: waveformData?.duration ?? activeDuration ?? 1e-23,
+    hasPeaks: waveformData !== undefined,
+    peaks: waveformData?.peaks ?? new Array(barCount).fill(0),
+  };
+}
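
Reading note (not part of the diff): a hedged sketch of how a component could consume useComputePeaks. The component and its props are invented; the import path assumes the consumer lives beside MessageAudio under ts/components/conversation.

// Illustrative consumer of the new hook; `SketchWaveformPreview` is made up.
import React from 'react';
import { useComputePeaks } from '../../hooks/useComputePeaks';

function SketchWaveformPreview({ audioUrl }: { audioUrl?: string }): JSX.Element {
  const { peaks, hasPeaks, duration } = useComputePeaks({
    audioUrl,
    activeDuration: undefined,
    barCount: 47,
    onCorrupted: () => console.warn('could not decode audio'),
  });

  // While peaks are still being computed the hook returns 47 zeros,
  // so the bars render flat instead of flashing.
  return (
    <div>
      {hasPeaks ? `duration: ${duration.toFixed(1)}s` : 'computing…'}
      {peaks.map((peak, i) => (
        <span key={i} style={{ height: 4 + 16 * peak }} />
      ))}
    </div>
  );
}
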
@@ -31,7 +31,7 @@ import { normalizeUuid } from '../util/normalizeUuid';
 import { clearTimeoutIfNecessary } from '../util/clearTimeoutIfNecessary';
 import type { AttachmentType, ThumbnailType } from '../types/Attachment';
 import { toDayMillis } from '../util/timestamp';
-import { isGIF } from '../types/Attachment';
+import { isGIF, isVoiceMessage } from '../types/Attachment';
 import type { CallHistoryDetailsType } from '../types/Calling';
 import { CallMode } from '../types/Calling';
 import * as Conversation from '../types/Conversation';

@@ -1015,6 +1015,7 @@ export class ConversationModel extends window.Backbone

   hasDraft(): boolean {
     const draftAttachments = this.get('draftAttachments') || [];

     return (this.get('draft') ||
       this.get('quotedMessageId') ||
       draftAttachments.length > 0) as boolean;

@@ -1031,6 +1032,12 @@ export class ConversationModel extends window.Backbone

     const draftAttachments = this.get('draftAttachments') || [];
     if (draftAttachments.length > 0) {
+      if (isVoiceMessage(draftAttachments[0])) {
+        return window.i18n('message--getNotificationText--text-with-emoji', {
+          text: window.i18n('message--getNotificationText--voice-message'),
+          emoji: '🎤',
+        });
+      }
       return window.i18n('Conversation--getDraftPreview--attachment');
     }
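
Reading note (not part of the diff): roughly, the draft-preview selection above boils down to the following, shown here with stand-in types and a simplified i18n call; isVoiceMessage() and the exact flag value belong to the real codebase, not this sketch.

// Illustrative only: stand-ins for the attachment type and i18n lookup.
type SketchDraftAttachment = { contentType: string; flags?: number };
const SKETCH_VOICE_MESSAGE_FLAG = 0x1; // stand-in; the real check is isVoiceMessage()

function sketchDraftPreview(
  draftAttachments: ReadonlyArray<SketchDraftAttachment>,
  t: (key: string) => string
): string | undefined {
  if (draftAttachments.length === 0) {
    return undefined;
  }
  const [first] = draftAttachments;
  // Voice-note drafts get the microphone emoji plus the voice-message label.
  if (((first.flags ?? 0) & SKETCH_VOICE_MESSAGE_FLAG) !== 0) {
    return `🎤 ${t('message--getNotificationText--voice-message')}`;
  }
  return t('Conversation--getDraftPreview--attachment');
}
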
@@ -75,6 +75,7 @@ export class RecorderClass {
       // eslint-disable-next-line @typescript-eslint/no-explicit-any
       audio: { mandatory: { googAutoGainControl: false } } as any,
     });

     if (!this.context || !this.input) {
       const err = new Error(
         'Recorder/getUserMedia/stream: Missing context or input!'
@@ -92,8 +92,12 @@ class GlobalMessageAudio {
     return this.#url;
   }

-  get duration() {
-    return this.#audio.duration;
+  get duration(): number | undefined {
+    // the underlying Audio element can return NaN if the audio hasn't loaded
+    // we filter out 0 or NaN as they are not useful values downstream
+    return Number.isNaN(this.#audio.duration) || this.#audio.duration === 0
+      ? undefined
+      : this.#audio.duration;
   }

   get currentTime() {
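
Reading note (not part of the diff): the duration getter above filters out HTMLMediaElement's NaN (not yet loaded) and 0 values. The same rule as a standalone sketch:

// Illustrative: normalize HTMLMediaElement.duration for downstream use.
function normalizedDuration(raw: number): number | undefined {
  // NaN before metadata has loaded, 0 for empty/invalid sources:
  // neither is useful to the player UI, so report "unknown" instead.
  return Number.isNaN(raw) || raw === 0 ? undefined : raw;
}

// normalizedDuration(NaN)  === undefined
// normalizedDuration(0)    === undefined
// normalizedDuration(12.3) === 12.3
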
@@ -28,7 +28,14 @@ import { assertDev } from '../../util/assert';

 // State

-export type AudioPlayerContent = ReadonlyDeep<{
+/** Some audio identified by a URL (currently only used for drafts) */
+type AudioPlayerContentDraft = ReadonlyDeep<{
+  conversationId: string;
+  url: string;
+}>;
+
+/** A voice note, with a queue for consecutive playback */
+export type AudioPlayerContentVoiceNote = ReadonlyDeep<{
   conversationId: string;
   context: string;
   current: VoiceNoteForPlayback;

@@ -46,9 +53,26 @@ export type ActiveAudioPlayerStateType = ReadonlyDeep<{
   playbackRate: number;
   duration: number | undefined; // never zero or NaN
   startPosition: number;
-  content: AudioPlayerContent;
+  content: AudioPlayerContentVoiceNote | AudioPlayerContentDraft;
 }>;

+/* eslint-disable @typescript-eslint/no-namespace */
+export namespace AudioPlayerContent {
+  export function isVoiceNote(
+    content: ActiveAudioPlayerStateType['content']
+  ): content is AudioPlayerContentVoiceNote {
+    return (
+      ('current' as const satisfies keyof AudioPlayerContentVoiceNote) in
+      content
+    );
+  }
+  export function isDraft(
+    content: ActiveAudioPlayerStateType['content']
+  ): content is AudioPlayerContentDraft {
+    return !isVoiceNote(content);
+  }
+}
+
 export type AudioPlayerStateType = ReadonlyDeep<{
   active: ActiveAudioPlayerStateType | undefined;
 }>;

@@ -58,18 +82,10 @@ export type AudioPlayerStateType = ReadonlyDeep<{
 export type SetMessageAudioAction = ReadonlyDeep<{
   type: 'audioPlayer/SET_MESSAGE_AUDIO';
   payload:
-    | {
-        conversationId: string;
-        context: string;
-        current: VoiceNoteForPlayback;
-        queue: ReadonlyArray<VoiceNoteForPlayback>;
-        isConsecutive: boolean;
-        // timestamp of the message following the last one in the queue
-        nextMessageTimestamp: number | undefined;
-        ourConversationId: string | undefined;
-        startPosition: number;
+    | ((AudioPlayerContentVoiceNote | AudioPlayerContentDraft) & {
         playbackRate: number;
-      }
+        startPosition: number;
+      })
     | undefined;
 }>;

@@ -115,7 +131,8 @@ type AudioPlayerActionType = ReadonlyDeep<
 // Action Creators

 export const actions = {
-  loadMessageAudio,
+  loadVoiceNoteAudio,
+  loadVoiceNoteDraftAudio,
   setPlaybackRate,
   currentTimeUpdated,
   durationChanged,

@@ -195,22 +212,24 @@ function setPlaybackRate(
 /**
  * Load message audio into the "content", the smart MiniPlayer will then play it
  */
-function loadMessageAudio({
+function loadVoiceNoteAudio({
   voiceNoteData,
   position,
   context,
   ourConversationId,
+  playbackRate,
 }: {
   voiceNoteData: VoiceNoteAndConsecutiveForPlayback;
   position: number;
   context: string;
   ourConversationId: string;
+  playbackRate: number;
 }): SetMessageAudioAction {
   const {
     conversationId,
     voiceNote,
     consecutiveVoiceNotes,
-    playbackRate,
+    // playbackRate,
     nextMessageTimestamp,
   } = voiceNoteData;
   return {

@@ -229,6 +248,18 @@ function loadMessageAudio({
   };
 }

+export function loadVoiceNoteDraftAudio(
+  content: AudioPlayerContentDraft & {
+    playbackRate: number;
+    startPosition: number;
+  }
+): SetMessageAudioAction {
+  return {
+    type: 'audioPlayer/SET_MESSAGE_AUDIO',
+    payload: content,
+  };
+}
+
 function setIsPlaying(value: boolean): SetIsPlayingAction {
   return {
     type: 'audioPlayer/SET_IS_PLAYING',

@@ -272,6 +303,14 @@ export function reducer(
   if (action.type === 'audioPlayer/SET_MESSAGE_AUDIO') {
     const { payload } = action;

+    if (payload === undefined) {
+      return {
+        ...state,
+        active: undefined,
+      };
+    }
+
+    const { playbackRate, startPosition, ...content } = payload;
     return {
       ...state,
       active:

@@ -281,9 +320,9 @@
         currentTime: 0,
         duration: undefined,
         playing: true,
-        playbackRate: payload.playbackRate,
-        content: payload,
-        startPosition: payload.startPosition,
+        playbackRate,
+        content,
+        startPosition,
       },
     };
   }

@@ -363,6 +402,10 @@ export function reducer(
       return state;
     }

+    if (!AudioPlayerContent.isVoiceNote(content)) {
+      return state;
+    }
+
     if (content.conversationId !== action.payload.conversationId) {
       return state;
     }

@@ -436,6 +479,13 @@
       return state;
     }

+    if (AudioPlayerContent.isDraft(content)) {
+      return {
+        ...state,
+        active: undefined,
+      };
+    }
+
     const { queue } = content;

     const [nextVoiceNote, ...newQueue] = queue;

@@ -475,6 +525,10 @@
     }
     const { content } = active;

+    if (!AudioPlayerContent.isVoiceNote(content)) {
+      return state;
+    }
+
     // if we deleted the message currently being played
     // move on to the next message
     if (content.current.id === id) {

@@ -532,6 +586,10 @@
       return state;
     }

+    if (AudioPlayerContent.isDraft(content)) {
+      return state;
+    }
+
     const { id, data } = action.payload;

     const { attachments } = data;
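
Reading note (not part of the diff): a hedged sketch of how the new AudioPlayerContent guards narrow the content union in consuming code; the state shape is abbreviated to the fields used here, and the names are stand-ins.

// Illustrative narrowing: only voice-note content carries a queue/current entry.
type SketchVoiceNoteContent = {
  conversationId: string;
  current: { id: string };
  queue: ReadonlyArray<{ id: string }>;
};
type SketchDraftContent = { conversationId: string; url: string };
type SketchContent = SketchVoiceNoteContent | SketchDraftContent;

function isVoiceNote(content: SketchContent): content is SketchVoiceNoteContent {
  return 'current' in content;
}

function describe(content: SketchContent): string {
  if (isVoiceNote(content)) {
    // TypeScript now knows `queue` exists on `content`.
    return `voice note with ${content.queue.length} queued`;
  }
  // Otherwise it must be a draft identified only by its URL.
  return `draft at ${content.url}`;
}
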
@@ -14,6 +14,7 @@ import { stringToMIMEType } from '../../types/MIME';
 import type { BoundActionCreatorsMapObject } from '../../hooks/useBoundActions';
 import { useBoundActions } from '../../hooks/useBoundActions';
 import { getComposerStateForConversation } from './composer';

 import * as Errors from '../../types/errors';
 import {
   ErrorDialogAudioRecorderType,

@@ -73,8 +74,9 @@ export const actions = {
   startRecording,
 };

-export const useActions = (): BoundActionCreatorsMapObject<typeof actions> =>
-  useBoundActions(actions);
+export const useAudioRecorderActions = (): BoundActionCreatorsMapObject<
+  typeof actions
+> => useBoundActions(actions);

 function startRecording(
   conversationId: string

@@ -133,9 +135,9 @@ function completeRecordingAction(): CompleteRecordingAction {
   };
 }

-function completeRecording(
+export function completeRecording(
   conversationId: string,
-  onSendAudioRecording?: (rec: InMemoryAttachmentDraftType) => unknown
+  onRecordingComplete: (rec: InMemoryAttachmentDraftType) => unknown
 ): ThunkAction<
   void,
   RootStateType,

@@ -172,9 +174,7 @@ function completeRecording(
       flags: Proto.AttachmentPointer.Flags.VOICE_MESSAGE,
     };

-    if (onSendAudioRecording) {
-      onSendAudioRecording(voiceNoteAttachment);
-    }
+    onRecordingComplete(voiceNoteAttachment);
   } finally {
     dispatch(completeRecordingAction());
   }
@@ -34,6 +34,7 @@ import {
   REMOVE_PREVIEW as REMOVE_LINK_PREVIEW,
 } from './linkPreviews';
 import { LinkPreviewSourceType } from '../../types/LinkPreview';
+import { completeRecording } from './audioRecorder';
 import { RecordingState } from '../../types/AudioRecorder';
 import { SHOW_TOAST } from './toast';
 import { ToastType } from '../../types/Toast';

@@ -333,6 +334,28 @@ function scrollToQuotedMessage({
   };
 }

+export function handleLeaveConversation(
+  conversationId: string
+): ThunkAction<void, RootStateType, unknown, never> {
+  return (dispatch, getState) => {
+    const { audioRecorder } = getState();
+
+    if (audioRecorder.recordingState !== RecordingState.Recording) {
+      return;
+    }
+
+    // save draft of voice note
+    dispatch(
+      completeRecording(conversationId, attachment => {
+        dispatch(
+          addPendingAttachment(conversationId, { ...attachment, pending: true })
+        );
+        dispatch(addAttachment(conversationId, attachment));
+      })
+    );
+  };
+}
+
 function sendMultiMediaMessage(
   conversationId: string,
   options: {

@@ -686,8 +709,23 @@ function addAttachment(

   const conversation = window.ConversationController.get(conversationId);
   if (conversation) {
-    conversation.attributes.draftAttachments = nextAttachments;
-    conversation.attributes.draftChanged = true;
+    conversation.set({
+      draftAttachments: nextAttachments,
+      draftChanged: true,
+    });
+
+    // if the conversation has already unloaded
+    if (!isSelectedConversation) {
+      const now = Date.now();
+      const activeAt = conversation.get('active_at') || now;
+      conversation.set({
+        active_at: activeAt,
+        draftChanged: false,
+        draftTimestamp: now,
+        timestamp: now,
+      });
+    }
+
     window.Signal.Data.updateConversation(conversation.attributes);
   }
 };
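
Reading note (not part of the diff): a simplified, framework-free sketch of the sequencing the new handleLeaveConversation thunk performs when the user navigates away mid-recording; the function and callback names are invented.

// Illustrative sequencing only; the real code goes through redux-thunk.
type SketchAttachment = { fileName: string; pending?: boolean };

async function sketchLeaveConversation(
  isRecording: boolean,
  finishRecording: () => Promise<SketchAttachment>,
  addPending: (a: SketchAttachment) => void,
  addFinal: (a: SketchAttachment) => void
): Promise<void> {
  if (!isRecording) {
    return; // nothing to save
  }
  // Show a pending placeholder first, then store the finished
  // voice-note attachment as the conversation's draft.
  const attachment = await finishRecording();
  addPending({ ...attachment, pending: true });
  addFinal(attachment);
}
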
@@ -136,15 +136,16 @@ import { UUIDKind } from '../../types/UUID';
 import { removeLinkPreview } from '../../services/LinkPreview';
 import type {
   ReplaceAttachmentsActionType,
+  ResetComposerActionType,
   SetFocusActionType,
   SetQuotedMessageActionType,
-  ResetComposerActionType,
 } from './composer';
 import {
   replaceAttachments,
   setComposerFocus,
   setQuoteByMessageId,
   resetComposer,
+  handleLeaveConversation,
 } from './composer';
 import { ReceiptType } from '../../types/Receipt';

@@ -3535,6 +3536,12 @@ function showConversation({
     return;
   }

+  // notify composer in case we need to stop recording a voice note
+  if (conversations.selectedConversationId) {
+    log.error('conversations - handleLeave');
+    dispatch(handleLeaveConversation(conversations.selectedConversationId));
+  }
+
   dispatch({
     type: SELECTED_CONVERSATION_CHANGED,
     payload: {
@@ -7,7 +7,6 @@ import filesize from 'filesize';
 import getDirection from 'direction';
 import emojiRegex from 'emoji-regex';
 import LinkifyIt from 'linkify-it';

 import type { StateType } from '../reducer';
 import type {
   LastMessageStatus,
@@ -1,6 +1,7 @@
 // Copyright 2019 Signal Messenger, LLC
 // SPDX-License-Identifier: AGPL-3.0-only
 
+import React from 'react';
 import { connect } from 'react-redux';
 import { get } from 'lodash';
 import { mapDispatchToProps } from '../actions';
@@ -31,6 +32,10 @@ import {
 } from '../selectors/stickers';
 import { isSignalConversation } from '../../util/isSignalConversation';
 import { getComposerStateForConversationIdSelector } from '../selectors/composer';
+import type { SmartCompositionRecordingProps } from './CompositionRecording';
+import { SmartCompositionRecording } from './CompositionRecording';
+import type { SmartCompositionRecordingDraftProps } from './CompositionRecordingDraft';
+import { SmartCompositionRecordingDraft } from './CompositionRecordingDraft';
 
 type ExternalProps = {
   id: string;
@@ -145,6 +150,16 @@ const mapStateToProps = (state: StateType, props: ExternalProps) => {
 
     draftText: dropNull(draftText),
     draftBodyRanges,
+    renderSmartCompositionRecording: (
+      recProps: SmartCompositionRecordingProps
+    ) => {
+      return <SmartCompositionRecording {...recProps} />;
+    },
+    renderSmartCompositionRecordingDraft: (
+      draftProps: SmartCompositionRecordingDraftProps
+    ) => {
+      return <SmartCompositionRecordingDraft {...draftProps} />;
+    },
   };
 };
 
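Note: the two render props added to mapStateToProps above follow the usual smart/presentational split. The sketch below is only an illustration of how a presentational component could consume them; the component and prop names are assumptions and not the actual CompositionArea API in this commit.

import React from 'react';

// Hypothetical attachment shape for illustration only.
type DraftAttachmentLike = { url?: string; pending: boolean; path?: string };

type RecordingSlotProps = {
  isRecording: boolean;
  voiceNoteAttachment?: DraftAttachmentLike;
  onBeforeSend: () => void;
  renderSmartCompositionRecording: (p: { onBeforeSend: () => void }) => JSX.Element;
  renderSmartCompositionRecordingDraft: (p: {
    voiceNoteAttachment: DraftAttachmentLike;
  }) => JSX.Element;
};

function RecordingSlot(props: RecordingSlotProps): JSX.Element | null {
  if (props.isRecording) {
    // Live recording UI, connected to the audioRecorder duck.
    return props.renderSmartCompositionRecording({ onBeforeSend: props.onBeforeSend });
  }
  if (props.voiceNoteAttachment) {
    // Recorded-but-unsent draft UI, connected to the audioPlayer duck.
    return props.renderSmartCompositionRecordingDraft({
      voiceNoteAttachment: props.voiceNoteAttachment,
    });
  }
  return null;
}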
59 ts/state/smart/CompositionRecording.tsx Normal file
@@ -0,0 +1,59 @@
+// Copyright 2022 Signal Messenger, LLC
+// SPDX-License-Identifier: AGPL-3.0-only
+
+import React, { useCallback } from 'react';
+import { useSelector } from 'react-redux';
+import { CompositionRecording } from '../../components/CompositionRecording';
+import { mapDispatchToProps } from '../actions';
+import { useAudioRecorderActions } from '../ducks/audioRecorder';
+import { useComposerActions } from '../ducks/composer';
+import { getSelectedConversationId } from '../selectors/conversations';
+import { getIntl } from '../selectors/user';
+
+export type SmartCompositionRecordingProps = {
+  onBeforeSend: () => void;
+};
+
+export function SmartCompositionRecording({
+  onBeforeSend,
+}: SmartCompositionRecordingProps): JSX.Element | null {
+  const i18n = useSelector(getIntl);
+  const selectedConversationId = useSelector(getSelectedConversationId);
+  const { cancelRecording, completeRecording } = useAudioRecorderActions();
+
+  const { sendMultiMediaMessage } = useComposerActions();
+
+  const handleCancel = useCallback(() => {
+    cancelRecording();
+  }, [cancelRecording]);
+
+  const handleSend = useCallback(() => {
+    if (selectedConversationId) {
+      completeRecording(selectedConversationId, voiceNoteAttachment => {
+        onBeforeSend();
+        sendMultiMediaMessage(selectedConversationId, { voiceNoteAttachment });
+      });
+    }
+  }, [
+    selectedConversationId,
+    completeRecording,
+    onBeforeSend,
+    sendMultiMediaMessage,
+  ]);
+
+  if (!selectedConversationId) {
+    return null;
+  }
+
+  return (
+    <CompositionRecording
+      i18n={i18n}
+      conversationId={selectedConversationId}
+      onCancel={handleCancel}
+      onSend={handleSend}
+      errorRecording={mapDispatchToProps.errorRecording}
+      addAttachment={mapDispatchToProps.addAttachment}
+      completeRecording={mapDispatchToProps.completeRecording}
+    />
+  );
+}
156 ts/state/smart/CompositionRecordingDraft.tsx Normal file
@@ -0,0 +1,156 @@
+// Copyright 2023 Signal Messenger, LLC
+// SPDX-License-Identifier: AGPL-3.0-only
+
+import React, { useCallback } from 'react';
+import { useSelector } from 'react-redux';
+import { CompositionRecordingDraft } from '../../components/CompositionRecordingDraft';
+import type { AttachmentDraftType } from '../../types/Attachment';
+import {
+  AudioPlayerContent,
+  useAudioPlayerActions,
+} from '../ducks/audioPlayer';
+import { useComposerActions } from '../ducks/composer';
+import { selectAudioPlayerActive } from '../selectors/audioPlayer';
+import {
+  getConversationByIdSelector,
+  getSelectedConversationId,
+} from '../selectors/conversations';
+import { getIntl } from '../selectors/user';
+
+export type SmartCompositionRecordingDraftProps = {
+  voiceNoteAttachment: AttachmentDraftType;
+};
+
+export function SmartCompositionRecordingDraft({
+  voiceNoteAttachment,
+}: SmartCompositionRecordingDraftProps): JSX.Element {
+  const i18n = useSelector(getIntl);
+  const active = useSelector(selectAudioPlayerActive);
+  const selectedConversationId = useSelector(getSelectedConversationId);
+  const getConversationById = useSelector(getConversationByIdSelector);
+  const {
+    loadVoiceNoteDraftAudio,
+    unloadMessageAudio,
+    setIsPlaying,
+    setPosition,
+  } = useAudioPlayerActions();
+  const { sendMultiMediaMessage, removeAttachment } = useComposerActions();
+
+  if (!selectedConversationId) {
+    throw new Error('No selected conversation');
+  }
+
+  const playbackRate =
+    getConversationById(selectedConversationId)?.voiceNotePlaybackRate ?? 1;
+
+  const audioUrl = !voiceNoteAttachment.pending
+    ? voiceNoteAttachment.url
+    : undefined;
+
+  const content = active?.content;
+
+  const draftActive =
+    content && AudioPlayerContent.isDraft(content) && content.url === audioUrl
+      ? active
+      : undefined;
+
+  const handlePlay = useCallback(
+    (positionAsRatio?: number) => {
+      if (!draftActive && audioUrl) {
+        loadVoiceNoteDraftAudio({
+          conversationId: selectedConversationId,
+          url: audioUrl,
+          startPosition: positionAsRatio ?? 0,
+          playbackRate,
+        });
+      }
+      if (draftActive) {
+        if (positionAsRatio !== undefined) {
+          setPosition(positionAsRatio);
+        }
+        if (!draftActive.playing) {
+          setIsPlaying(true);
+        }
+      }
+    },
+    [
+      draftActive,
+      audioUrl,
+      loadVoiceNoteDraftAudio,
+      selectedConversationId,
+      playbackRate,
+      setPosition,
+      setIsPlaying,
+    ]
+  );
+
+  const handlePause = useCallback(() => {
+    setIsPlaying(false);
+  }, [setIsPlaying]);
+
+  const handleSend = useCallback(() => {
+    if (selectedConversationId) {
+      sendMultiMediaMessage(selectedConversationId, {
+        draftAttachments: [voiceNoteAttachment],
+      });
+    }
+  }, [selectedConversationId, sendMultiMediaMessage, voiceNoteAttachment]);
+
+  const handleCancel = useCallback(() => {
+    unloadMessageAudio();
+    if (selectedConversationId && voiceNoteAttachment.path) {
+      removeAttachment(selectedConversationId, voiceNoteAttachment.path);
+    }
+  }, [
+    removeAttachment,
+    selectedConversationId,
+    unloadMessageAudio,
+    voiceNoteAttachment.path,
+  ]);

+  const handleScrub = useCallback(
+    (positionAsRatio: number) => {
+      // if scrubbing when audio not loaded
+      if (!draftActive && audioUrl) {
+        loadVoiceNoteDraftAudio({
+          conversationId: selectedConversationId,
+          url: audioUrl,
+          startPosition: positionAsRatio,
+          playbackRate,
+        });
+        return;
+      }
+
+      // if scrubbing when audio is loaded
+      if (draftActive) {
+        setPosition(positionAsRatio);
+
+        if (draftActive?.playing) {
+          setIsPlaying(true);
+        }
+      }
+    },
+    [
+      audioUrl,
+      draftActive,
+      loadVoiceNoteDraftAudio,
+      playbackRate,
+      selectedConversationId,
+      setIsPlaying,
+      setPosition,
+    ]
+  );
+
+  return (
+    <CompositionRecordingDraft
+      i18n={i18n}
+      audioUrl={audioUrl}
+      active={draftActive}
+      onCancel={handleCancel}
+      onSend={handleSend}
+      onPlay={handlePlay}
+      onPause={handlePause}
+      onScrub={handleScrub}
+    />
+  );
+}
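Note: both smart components above branch on AudioPlayerContent.isDraft / AudioPlayerContent.isVoiceNote. The duck's actual types are not part of this diff; the following is a hedged sketch of the kind of discriminated union and type guards that would make those branches type-safe. All field and tag names here are assumptions for illustration only.

// Illustrative shapes only; not the real audioPlayer duck.
type VoiceNoteContentSketch = {
  kind: 'voiceNote';
  current: { id: string; url?: string; isPlayed: boolean; messageIdForLogging: string };
  queue: ReadonlyArray<unknown>;
  context: string;
  isConsecutive: boolean;
};

type DraftContentSketch = {
  kind: 'draft';
  conversationId: string;
  url: string;
};

type ContentSketch = VoiceNoteContentSketch | DraftContentSketch;

const AudioPlayerContentSketch = {
  // Type guards let callers narrow `content` before touching voice-note-only
  // fields such as `current` or draft-only fields such as `url`.
  isVoiceNote: (c: ContentSketch): c is VoiceNoteContentSketch => c.kind === 'voiceNote',
  isDraft: (c: ContentSketch): c is DraftContentSketch => c.kind === 'draft',
};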
@@ -6,7 +6,10 @@ import { useSelector } from 'react-redux';
 import { MessageAudio } from '../../components/conversation/MessageAudio';
 import type { OwnProps as MessageAudioOwnProps } from '../../components/conversation/MessageAudio';
 import type { ActiveAudioPlayerStateType } from '../ducks/audioPlayer';
-import { useAudioPlayerActions } from '../ducks/audioPlayer';
+import {
+  AudioPlayerContent,
+  useAudioPlayerActions,
+} from '../ducks/audioPlayer';
 import {
   selectAudioPlayerActive,
   selectVoiceNoteAndConsecutive,
@@ -14,6 +17,10 @@ import {
 import { useConversationsActions } from '../ducks/conversations';
 import { getUserConversationId } from '../selectors/user';
 import * as log from '../../logging/log';
+import {
+  getConversationByIdSelector,
+  getSelectedConversationId,
+} from '../selectors/conversations';
 
 export type Props = Omit<MessageAudioOwnProps, 'active' | 'onPlayMessage'> & {
   renderingContext: string;
@@ -24,18 +31,28 @@ export function SmartMessageAudio({
   ...props
 }: Props): JSX.Element | null {
   const active = useSelector(selectAudioPlayerActive);
-  const { loadMessageAudio, setIsPlaying, setPlaybackRate, setPosition } =
+  const { loadVoiceNoteAudio, setIsPlaying, setPlaybackRate, setPosition } =
     useAudioPlayerActions();
   const { pushPanelForConversation } = useConversationsActions();
 
   const getVoiceNoteData = useSelector(selectVoiceNoteAndConsecutive);
   const ourConversationId = useSelector(getUserConversationId);
+  const getConversationById = useSelector(getConversationByIdSelector);
+  const selectedConversationId = useSelector(getSelectedConversationId);
 
+  if (!selectedConversationId) {
+    throw new Error('No selected conversation');
+  }
+  const playbackRate =
+    getConversationById(selectedConversationId)?.voiceNotePlaybackRate ?? 1;
+
+  const content = active?.content;
+
   const messageActive: ActiveAudioPlayerStateType | undefined =
-    active &&
-    active.content &&
-    active.content.current.id === props.id &&
-    active.content.context === renderingContext
+    content &&
+    AudioPlayerContent.isVoiceNote(content) &&
+    content.current.id === props.id &&
+    content.context === renderingContext
       ? active
       : undefined;
 
@@ -55,14 +72,21 @@ export function SmartMessageAudio({
         return;
       }
 
-      loadMessageAudio({
+      loadVoiceNoteAudio({
         voiceNoteData,
         position,
         context: renderingContext,
         ourConversationId,
+        playbackRate,
       });
     },
-    [getVoiceNoteData, loadMessageAudio, ourConversationId, renderingContext]
+    [
+      getVoiceNoteData,
+      loadVoiceNoteAudio,
+      ourConversationId,
+      renderingContext,
+      playbackRate,
+    ]
  );
 
   return (
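Note: SmartMessageAudio and SmartCompositionRecordingDraft both derive the per-conversation playback rate inline with the same `?? 1` fallback. A tiny helper like the sketch below could centralize that lookup; the helper name and shapes are hypothetical and not part of this commit.

// Sketch only: falls back to normal speed when the conversation has no
// stored voiceNotePlaybackRate.
type ConversationLike = { voiceNotePlaybackRate?: number };

function getVoiceNotePlaybackRate(
  getConversationById: (id: string) => ConversationLike | undefined,
  conversationId: string
): number {
  return getConversationById(conversationId)?.voiceNotePlaybackRate ?? 1;
}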
@@ -4,7 +4,10 @@
 import React, { useCallback } from 'react';
 import { useSelector } from 'react-redux';
 import { MiniPlayer, PlayerState } from '../../components/MiniPlayer';
-import { useAudioPlayerActions } from '../ducks/audioPlayer';
+import {
+  AudioPlayerContent,
+  useAudioPlayerActions,
+} from '../ducks/audioPlayer';
 import {
   selectAudioPlayerActive,
   selectVoiceNoteTitle,
@@ -30,15 +33,25 @@ export function SmartMiniPlayer(): JSX.Element | null {
     return null;
   }
 
+  const { content } = active;
+
+  const url = AudioPlayerContent.isVoiceNote(content)
+    ? content.current.url
+    : content.url;
+
   let state = PlayerState.loading;
-  if (active.content.current.url) {
+  if (url) {
     state = active.playing ? PlayerState.playing : PlayerState.paused;
   }
 
   return (
     <MiniPlayer
       i18n={i18n}
-      title={getVoiceNoteTitle(active.content.current)}
+      title={
+        AudioPlayerContent.isDraft(content)
+          ? i18n('you')
+          : getVoiceNoteTitle(content.current)
+      }
       onPlay={handlePlay}
       onPause={handlePause}
       onPlaybackRate={setPlaybackRate}
@@ -6,7 +6,10 @@ import { useSelector } from 'react-redux';
 import type { VoiceNotesPlaybackProps } from '../../components/VoiceNotesPlaybackContext';
 import { VoiceNotesPlaybackProvider } from '../../components/VoiceNotesPlaybackContext';
 import { selectAudioPlayerActive } from '../selectors/audioPlayer';
-import { useAudioPlayerActions } from '../ducks/audioPlayer';
+import {
+  AudioPlayerContent,
+  useAudioPlayerActions,
+} from '../ducks/audioPlayer';
 import { globalMessageAudio } from '../../services/globalMessageAudio';
 import { strictAssert } from '../../util/assert';
 import * as log from '../../logging/log';
@@ -36,8 +39,22 @@ export function SmartVoiceNotesPlaybackProvider(
   const previousStartPosition = usePrevious(undefined, active?.startPosition);
 
   const content = active?.content;
-  const current = content?.current;
-  const url = current?.url;
+  let url: undefined | string;
+  let messageId: undefined | string;
+  let messageIdForLogging: undefined | string;
+  let playNextConsecutiveSound = false;
+  let playFinishConsecutiveSound = false;
+
+  if (content && AudioPlayerContent.isVoiceNote(content)) {
+    ({ url, id: messageId } = content.current);
+    messageIdForLogging = content.current.messageIdForLogging;
+    playNextConsecutiveSound = content.isConsecutive;
+    playFinishConsecutiveSound =
+      content.isConsecutive && content.queue.length === 0;
+  }
+  if (content && AudioPlayerContent.isDraft(content)) {
+    url = content.url;
+  }
 
   const {
     messageAudioEnded,
@@ -49,7 +66,7 @@ export function SmartVoiceNotesPlaybackProvider(
   useEffect(() => {
     // if we don't have a new audio source
    // just control playback
-    if (!content || !current || !url || url === globalMessageAudio.url) {
+    if (!content || !url || url === globalMessageAudio.url) {
       if (!active?.playing && globalMessageAudio.playing) {
         globalMessageAudio.pause();
       }
@@ -65,71 +82,52 @@ export function SmartVoiceNotesPlaybackProvider(
       if (
         active &&
         active.startPosition !== undefined &&
-        active.startPosition !== previousStartPosition
+        active.startPosition !== previousStartPosition &&
+        globalMessageAudio.duration !== undefined
       ) {
         globalMessageAudio.currentTime =
           active.startPosition * globalMessageAudio.duration;
       }
+
+      if (!active?.playing && globalMessageAudio.playing) {
+        globalMessageAudio.pause();
+      }
+
+      if (active?.playing && !globalMessageAudio.playing) {
+        globalMessageAudio.play();
+      }
+
+      if (active && active.playbackRate !== globalMessageAudio.playbackRate) {
+        globalMessageAudio.playbackRate = active.playbackRate;
+      }
+
+      // if user requested a new position
+      if (
+        active &&
+        active.startPosition !== undefined &&
+        active.startPosition !== previousStartPosition &&
+        active.duration
+      ) {
+        globalMessageAudio.currentTime = active.startPosition * active.duration;
+      }
       return;
     }
 
-    // otherwise we have a new audio source
-    // we just load it and play it
-    globalMessageAudio.load({
+    // if we have a new audio source
+    loadAudio({
       url,
       playbackRate: active.playbackRate,
-      onLoadedMetadata() {
-        strictAssert(
-          !Number.isNaN(globalMessageAudio.duration),
-          'Audio should have definite duration on `loadedmetadata` event'
-        );
-        log.info(
-          'SmartVoiceNotesPlaybackProvider: `loadedmetadata` event',
-          current.id
-        );
-        if (active.startPosition !== 0) {
-          globalMessageAudio.currentTime =
-            active.startPosition * globalMessageAudio.duration;
-        }
-      },
-      onDurationChange() {
-        log.info(
-          'SmartVoiceNotesPlaybackProvider: `durationchange` event',
-          current.id
-        );
-        const reportedDuration = globalMessageAudio.duration;
-
-        // the underlying Audio element can return NaN if the audio hasn't loaded
-        // we filter out 0 or NaN as they are not useful values downstream
-        const newDuration =
-          Number.isNaN(reportedDuration) || reportedDuration === 0
-            ? undefined
-            : reportedDuration;
-        durationChanged(newDuration);
-      },
-      onTimeUpdate() {
-        currentTimeUpdated(globalMessageAudio.currentTime);
-      },
-      onEnded() {
-        if (content.isConsecutive && content.queue.length === 0) {
-          void stateChangeConfirmDownSound.play();
-        }
-        messageAudioEnded();
-      },
-      onError(error) {
-        log.error(
-          'SmartVoiceNotesPlaybackProvider: playback error',
-          current.messageIdForLogging,
-          Errors.toLogFormat(error)
-        );
-        unloadMessageAudio();
-      },
+      messageId,
+      messageIdForLogging,
+      startPosition: active.startPosition,
+      playFinishConsecutiveSound,
+      durationChanged,
+      unloadMessageAudio,
+      currentTimeUpdated,
+      messageAudioEnded,
     });
 
-    // if this message was part of the queue (consecutive, added indirectly)
-    // we play a note to let the user we're onto a new message
-    // (false for the first message in a consecutive group, since the user initiated it)
-    if (content.isConsecutive) {
+    if (playNextConsecutiveSound) {
       // eslint-disable-next-line more/no-then
       void stateChangeConfirmUpSound.play().then(() => {
         globalMessageAudio.play();
@@ -138,17 +136,101 @@ export function SmartVoiceNotesPlaybackProvider(
         globalMessageAudio.play();
       }
 
-      if (!current.isPlayed) {
-        const message = conversations.messagesLookup[current.id];
-        if (message && message.seenStatus !== SeenStatus.Unseen) {
-          markViewed(current.id);
+      if (AudioPlayerContent.isVoiceNote(content)) {
+        if (!content.current.isPlayed) {
+          const message = conversations.messagesLookup[content.current.id];
+          if (message && message.seenStatus !== SeenStatus.Unseen) {
+            markViewed(content.current.id);
+          }
+        } else {
+          log.info('SmartVoiceNotesPlaybackProvider: message already played', {
+            message: content.current.messageIdForLogging,
+          });
         }
-      } else {
-        log.info('SmartVoiceNotesPlaybackProvider: message already played', {
-          message: current.messageIdForLogging,
-        });
       }
-  });
+  }, [
+    active,
+    content,
+    conversations.messagesLookup,
+    currentTimeUpdated,
+    durationChanged,
+    messageAudioEnded,
+    messageId,
+    messageIdForLogging,
+    playFinishConsecutiveSound,
+    playNextConsecutiveSound,
+    previousStartPosition,
+    unloadMessageAudio,
+    url,
+  ]);
 
   return <VoiceNotesPlaybackProvider {...props} />;
 }
+
+function loadAudio({
+  url,
+  playbackRate,
+  messageId,
+  messageIdForLogging,
+  startPosition,
+  playFinishConsecutiveSound,
+  durationChanged,
+  currentTimeUpdated,
+  messageAudioEnded,
+  unloadMessageAudio,
+}: {
+  url: string;
+  playbackRate: number;
+  messageId: string | undefined;
+  messageIdForLogging: string | undefined;
+  startPosition: number;
+  playFinishConsecutiveSound: boolean;
+  durationChanged: (value: number | undefined) => void;
+  currentTimeUpdated: (value: number) => void;
+  messageAudioEnded: () => void;
+  unloadMessageAudio: () => void;
+}) {
+  globalMessageAudio.load({
+    url,
+    playbackRate,
+    onLoadedMetadata() {
+      strictAssert(
+        globalMessageAudio.duration !== undefined,
+        'Audio should have definite duration on `loadedmetadata` event'
+      );
+      log.info(
+        'SmartVoiceNotesPlaybackProvider: `loadedmetadata` event',
+        messageId
+      );
+      if (startPosition !== 0) {
+        globalMessageAudio.currentTime =
+          startPosition * globalMessageAudio.duration;
+      }
+      durationChanged(globalMessageAudio.duration);
+    },
+    onDurationChange() {
+      log.info(
+        'SmartVoiceNotesPlaybackProvider: `durationchange` event',
+        messageId
+      );
+      durationChanged(globalMessageAudio.duration);
+    },
+    onTimeUpdate() {
+      currentTimeUpdated(globalMessageAudio.currentTime);
+    },
+    onEnded() {
+      if (playFinishConsecutiveSound) {
+        void stateChangeConfirmDownSound.play();
+      }
+      messageAudioEnded();
+    },
+    onError(error) {
+      log.error(
+        'SmartVoiceNotesPlaybackProvider: playback error',
+        messageIdForLogging,
+        Errors.toLogFormat(error)
+      );
+      unloadMessageAudio();
+    },
+  });
+}
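Note: the loadAudio helper introduced above only depends on the callback surface of globalMessageAudio.load. The real service is not shown in this diff; the sketch below is a rough, assumed implementation backed by an HTMLAudioElement, included only to illustrate the contract the helper relies on.

// Minimal sketch of the assumed load() contract; not the real service.
type LoadOptions = {
  url: string;
  playbackRate: number;
  onLoadedMetadata(): void;
  onDurationChange(): void;
  onTimeUpdate(): void;
  onEnded(): void;
  onError(error: unknown): void;
};

function makeMessageAudioSketch() {
  const audio = new Audio();
  return {
    get url(): string {
      return audio.src;
    },
    get playing(): boolean {
      return !audio.paused;
    },
    get playbackRate(): number {
      return audio.playbackRate;
    },
    set playbackRate(rate: number) {
      audio.playbackRate = rate;
    },
    get duration(): number | undefined {
      // Mirror the strictAssert above: report undefined until metadata loads.
      return Number.isNaN(audio.duration) ? undefined : audio.duration;
    },
    get currentTime(): number {
      return audio.currentTime;
    },
    set currentTime(value: number) {
      audio.currentTime = value;
    },
    play(): void {
      void audio.play();
    },
    pause(): void {
      audio.pause();
    },
    load(options: LoadOptions): void {
      // Listeners accumulate across load() calls in this sketch; a real
      // service would also remove the previous source's handlers.
      audio.src = options.url;
      audio.playbackRate = options.playbackRate;
      audio.addEventListener('loadedmetadata', options.onLoadedMetadata);
      audio.addEventListener('durationchange', options.onDurationChange);
      audio.addEventListener('timeupdate', options.onTimeUpdate);
      audio.addEventListener('ended', options.onEnded);
      audio.addEventListener('error', () => options.onError(audio.error));
    },
  };
}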
114 ts/state/smart/Waveform.tsx Normal file
@@ -0,0 +1,114 @@
+// Copyright 2023 Signal Messenger, LLC
+// SPDX-License-Identifier: AGPL-3.0-only
+
+import { noop } from 'lodash';
+import React, { useEffect, useState } from 'react';
+import { Waveform } from '../../components/conversation/Waveform';
+import type { ComputePeaksResult } from '../../components/VoiceNotesPlaybackContext';
+import { VoiceNotesPlaybackContext } from '../../components/VoiceNotesPlaybackContext';
+import * as log from '../../logging/log';
+
+const BAR_COUNT = 47;
+
+type Props = {
+  // undefined if not downloaded yet
+  audioUrl: string | undefined;
+  computePeaks(url: string, barCount: number): Promise<ComputePeaksResult>;
+  duration: number | undefined;
+  onCorrupted: () => void;
+  barMinHeight: number;
+  barMaxHeight: number;
+  currentTime: number;
+};
+
+function SmartWaveformImpl({
+  audioUrl,
+  barMinHeight,
+  barMaxHeight,
+  currentTime,
+  duration: activeDuration,
+  computePeaks,
+  onCorrupted,
+}: Props) {
+  const [hasPeaks, setHasPeaks] = useState(false);
+  const [peaks, setPeaks] = useState<ReadonlyArray<number>>(
+    new Array(BAR_COUNT).fill(0)
+  );
+
+  const [duration, setDuration] = useState(activeDuration ?? 1e-23);
+
+  const isComputing = audioUrl && !hasPeaks;
+
+  // This effect loads audio file and computes its RMS peak for displaying the
+  // waveform.
+  useEffect(() => {
+    if (!isComputing) {
+      return noop;
+    }
+
+    log.info('MessageAudio: loading audio and computing waveform');
+
+    let canceled = false;
+
+    void (async () => {
+      try {
+        const { peaks: newPeaks, duration: newDuration } = await computePeaks(
+          audioUrl,
+          BAR_COUNT
+        );
+        if (canceled) {
+          return;
+        }
+        setPeaks(newPeaks);
+        setHasPeaks(true);
+        setDuration(Math.max(newDuration, 1e-23));
+      } catch (err) {
+        log.error(
+          'MessageAudio: computePeaks error, marking as corrupted',
+          err
+        );
+
+        onCorrupted();
+      }
+    })();
+
+    return () => {
+      canceled = true;
+    };
+  }, [
+    audioUrl,
+    computePeaks,
+    setDuration,
+    setPeaks,
+    setHasPeaks,
+    onCorrupted,
+    isComputing,
+  ]);
+
+  return (
+    <Waveform
+      peaks={peaks}
+      barMinHeight={barMinHeight}
+      barMaxHeight={barMaxHeight}
+      duration={duration}
+      currentTime={currentTime}
+    />
+  );
+}
+
+export function SmartWaveform(props: Omit<Props, 'computePeaks'>): JSX.Element {
+  return (
+    <VoiceNotesPlaybackContext.Consumer>
+      {voiceNotesPlaybackProps => {
+        return (
+          voiceNotesPlaybackProps && (
+            <SmartWaveformImpl
+              {...props}
+              computePeaks={voiceNotesPlaybackProps.computePeaks}
+            />
+          )
+        );
+      }}
+    </VoiceNotesPlaybackContext.Consumer>
+  );
+}
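Note: SmartWaveform delegates the actual analysis to the computePeaks function supplied by VoiceNotesPlaybackContext, which this diff does not show. The sketch below is one plausible (assumed, not Signal's) way to satisfy that signature with the Web Audio API: decode the file, reduce it to barCount buckets of RMS energy, and normalize to the 0..1 range the bars expect.

// Rough sketch of a computePeaks provider compatible with the props above.
async function computePeaksSketch(
  url: string,
  barCount: number
): Promise<{ peaks: ReadonlyArray<number>; duration: number }> {
  const response = await fetch(url);
  const audioContext = new AudioContext();
  const buffer = await audioContext.decodeAudioData(await response.arrayBuffer());

  const samples = buffer.getChannelData(0);
  const bucketSize = Math.max(1, Math.floor(samples.length / barCount));
  const peaks: Array<number> = [];

  for (let bar = 0; bar < barCount; bar += 1) {
    // RMS over this bucket of samples.
    let sum = 0;
    for (let i = 0; i < bucketSize; i += 1) {
      const sample = samples[bar * bucketSize + i] ?? 0;
      sum += sample * sample;
    }
    peaks.push(Math.sqrt(sum / bucketSize));
  }

  // Normalize so the loudest bucket maps to a full-height bar.
  const max = Math.max(...peaks, 1e-23);
  return { peaks: peaks.map(p => p / max), duration: buffer.duration };
}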
@@ -12,7 +12,7 @@ import { noopAction } from '../../../state/ducks/noop';
 import type { StateType } from '../../../state/reducer';
 import { reducer as rootReducer } from '../../../state/reducer';
 import type { SelectedConversationChangedActionType } from '../../../state/ducks/conversations';
-import { actions } from '../../../state/ducks/audioPlayer';
+import { actions, AudioPlayerContent } from '../../../state/ducks/audioPlayer';
 import type { VoiceNoteAndConsecutiveForPlayback } from '../../../state/selectors/audioPlayer';
 
 const { messageDeleted, messageChanged } = conversationsActions;
@@ -50,19 +50,23 @@ describe('both/state/ducks/audioPlayer', () => {
     const state = getEmptyRootState();
     const updated = rootReducer(
       state,
-      actions.loadMessageAudio({
+      actions.loadVoiceNoteAudio({
         voiceNoteData: voiceNoteDataForMessage(MESSAGE_ID),
         position: 0,
         context: 'context',
         ourConversationId: 'convo',
+        playbackRate: 1,
       })
     );
 
-    assert.strictEqual(
-      updated.audioPlayer.active?.content?.current.id,
-      MESSAGE_ID
-    );
-    assert.strictEqual(updated.audioPlayer.active?.content?.context, 'context');
+    const content = updated.audioPlayer.active?.content;
+
+    assert.isTrue(content && AudioPlayerContent.isVoiceNote(content));
+
+    if (content && AudioPlayerContent.isVoiceNote(content)) {
+      assert.strictEqual(content.current.id, MESSAGE_ID);
+      assert.strictEqual(content.context, 'context');
+    }
 
     return updated;
   };
@@ -74,21 +78,22 @@ describe('both/state/ducks/audioPlayer', () => {
 
       const updated = rootReducer(
        state,
-        actions.loadMessageAudio({
+        actions.loadVoiceNoteAudio({
          voiceNoteData: voiceNoteDataForMessage('test'),
           position: 0,
           context: 'context',
           ourConversationId: 'convo',
+          playbackRate: 1,
         })
       );
-      assert.strictEqual(
-        updated.audioPlayer.active?.content?.current.id,
-        'test'
-      );
-      assert.strictEqual(
-        updated.audioPlayer.active?.content?.context,
-        'context'
-      );
+
+      const content = updated.audioPlayer.active?.content;
+      assert.isTrue(content && AudioPlayerContent.isVoiceNote(content));
+
+      if (content && AudioPlayerContent.isVoiceNote(content)) {
+        assert.strictEqual(content.current.id, 'test');
+        assert.strictEqual(content.context, 'context');
+      }
     });
   });
 
@@ -100,10 +105,12 @@ describe('both/state/ducks/audioPlayer', () => {
         payload: { id: 'any' },
       });
 
-      assert.strictEqual(
-        updated.audioPlayer.active?.content?.current.id,
-        MESSAGE_ID
-      );
+      const content = updated.audioPlayer.active?.content;
+      assert.isTrue(content && AudioPlayerContent.isVoiceNote(content));
+
+      if (content && AudioPlayerContent.isVoiceNote(content)) {
+        assert.strictEqual(content.current.id, MESSAGE_ID);
+      }
     });
 
     it('resets active.content when message was deleted', () => {
@@ -47,11 +47,12 @@ describe('state/selectors/audioPlayer', () => {
 
       const updated = rootReducer(
         state,
-        actions.loadMessageAudio({
+        actions.loadVoiceNoteAudio({
          voiceNoteData: voiceNoteDataForMessage('id'),
          position: 0,
          context: 'context',
          ourConversationId: 'convo',
+         playbackRate: 1,
         })
       );
 
@@ -2,7 +2,6 @@
 // SPDX-License-Identifier: AGPL-3.0-only
 
 export enum ErrorDialogAudioRecorderType {
-  Blur,
   ErrorRecording,
   Timeout,
 }
@@ -1976,6 +1976,27 @@
     "reasonCategory": "usageTrusted",
     "updated": "2022-10-03T16:06:12.837Z"
   },
+  {
+    "rule": "React-useRef",
+    "path": "ts/components/CompositionRecording.tsx",
+    "line": " const startTime = useRef(Date.now());",
+    "reasonCategory": "usageTrusted",
+    "updated": "2023-02-26T23:17:41.234Z"
+  },
+  {
+    "rule": "React-useRef",
+    "path": "ts/components/CompositionRecording.tsx",
+    "line": " const drift = useRef(0);",
+    "reasonCategory": "usageTrusted",
+    "updated": "2023-02-26T23:20:28.848Z"
+  },
+  {
+    "rule": "React-useRef",
+    "path": "ts/components/CompositionRecordingDraft.tsx",
+    "line": " const timeout = useRef<undefined | NodeJS.Timeout>(undefined);",
+    "reasonCategory": "usageTrusted",
+    "updated": "2023-02-26T23:20:28.848Z"
+  },
   {
     "rule": "React-useRef",
     "path": "ts/components/CompositionTextArea.tsx",
@@ -2377,13 +2398,6 @@
     "updated": "2021-03-05T19:57:01.431Z",
     "reasonDetail": "Used for propagating click from the Message to MessageAudio's button"
   },
-  {
-    "rule": "React-useRef",
-    "path": "ts/components/conversation/MessageAudio.tsx",
-    "line": " const waveformRef = useRef<HTMLDivElement | null>(null);",
-    "reasonCategory": "usageTrusted",
-    "updated": "2021-07-30T16:57:33.618Z"
-  },
   {
     "rule": "React-useRef",
     "path": "ts/components/conversation/Quote.tsx",
@@ -2435,6 +2449,13 @@
     "updated": "2019-11-01T22:46:33.013Z",
     "reasonDetail": "Used for setting focus only"
   },
+  {
+    "rule": "React-useRef",
+    "path": "ts/components/conversation/WaveformScrubber.tsx",
+    "line": " const waveformRef = useRef<HTMLDivElement | null>(null);",
+    "reasonCategory": "usageTrusted",
+    "updated": "2023-02-26T23:20:28.848Z"
+  },
   {
     "rule": "React-useRef",
     "path": "ts/components/emoji/EmojiButton.tsx",
@@ -32,6 +32,7 @@ export function resolveDraftAttachmentOnDisk(
     'caption',
     'contentType',
     'fileName',
+    'flags',
     'path',
     'size',
     'width',