Voice notes drafts

This commit is contained in:
Alvaro 2023-03-02 13:55:40 -07:00 committed by GitHub
parent 356fb301e1
commit 99015d7b96
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
48 changed files with 2113 additions and 909 deletions

View file

@ -28,7 +28,14 @@ import { assertDev } from '../../util/assert';
// State
export type AudioPlayerContent = ReadonlyDeep<{
/** Some audio identified by a URL (currently only used for drafts) */
type AudioPlayerContentDraft = ReadonlyDeep<{
conversationId: string;
url: string;
}>;
/** A voice note, with a queue for consecutive playback */
export type AudioPlayerContentVoiceNote = ReadonlyDeep<{
conversationId: string;
context: string;
current: VoiceNoteForPlayback;
@ -46,9 +53,26 @@ export type ActiveAudioPlayerStateType = ReadonlyDeep<{
playbackRate: number;
duration: number | undefined; // never zero or NaN
startPosition: number;
content: AudioPlayerContent;
content: AudioPlayerContentVoiceNote | AudioPlayerContentDraft;
}>;
/* eslint-disable @typescript-eslint/no-namespace */
export namespace AudioPlayerContent {
  /**
   * Narrows player content to the voice-note variant. Voice-note content is
   * the only variant carrying a `current` field, so its presence is the
   * discriminator.
   */
  export function isVoiceNote(
    content: ActiveAudioPlayerStateType['content']
  ): content is AudioPlayerContentVoiceNote {
    // `satisfies` gives a compile-time guarantee that the probed key
    // really exists on AudioPlayerContentVoiceNote
    const discriminator =
      'current' as const satisfies keyof AudioPlayerContentVoiceNote;
    return discriminator in content;
  }

  /** Narrows player content to a draft (i.e. anything not a voice note). */
  export function isDraft(
    content: ActiveAudioPlayerStateType['content']
  ): content is AudioPlayerContentDraft {
    return !isVoiceNote(content);
  }
}
export type AudioPlayerStateType = ReadonlyDeep<{
active: ActiveAudioPlayerStateType | undefined;
}>;
@ -58,18 +82,10 @@ export type AudioPlayerStateType = ReadonlyDeep<{
export type SetMessageAudioAction = ReadonlyDeep<{
type: 'audioPlayer/SET_MESSAGE_AUDIO';
payload:
| {
conversationId: string;
context: string;
current: VoiceNoteForPlayback;
queue: ReadonlyArray<VoiceNoteForPlayback>;
isConsecutive: boolean;
// timestamp of the message following the last one in the queue
nextMessageTimestamp: number | undefined;
ourConversationId: string | undefined;
startPosition: number;
| ((AudioPlayerContentVoiceNote | AudioPlayerContentDraft) & {
playbackRate: number;
}
startPosition: number;
})
| undefined;
}>;
@ -115,7 +131,8 @@ type AudioPlayerActionType = ReadonlyDeep<
// Action Creators
export const actions = {
loadMessageAudio,
loadVoiceNoteAudio,
loadVoiceNoteDraftAudio,
setPlaybackRate,
currentTimeUpdated,
durationChanged,
@ -195,22 +212,24 @@ function setPlaybackRate(
/**
* Load message audio into the "content", the smart MiniPlayer will then play it
*/
function loadMessageAudio({
function loadVoiceNoteAudio({
voiceNoteData,
position,
context,
ourConversationId,
playbackRate,
}: {
voiceNoteData: VoiceNoteAndConsecutiveForPlayback;
position: number;
context: string;
ourConversationId: string;
playbackRate: number;
}): SetMessageAudioAction {
const {
conversationId,
voiceNote,
consecutiveVoiceNotes,
playbackRate,
// playbackRate,
nextMessageTimestamp,
} = voiceNoteData;
return {
@ -229,6 +248,18 @@ function loadMessageAudio({
};
}
/**
 * Build the action that loads a voice-note draft (audio identified by URL)
 * into the player content, along with its initial playback rate and
 * start position. The smart MiniPlayer will then play it.
 */
export function loadVoiceNoteDraftAudio(
  content: AudioPlayerContentDraft & {
    playbackRate: number;
    startPosition: number;
  }
): SetMessageAudioAction {
  const action: SetMessageAudioAction = {
    type: 'audioPlayer/SET_MESSAGE_AUDIO',
    payload: content,
  };
  return action;
}
function setIsPlaying(value: boolean): SetIsPlayingAction {
return {
type: 'audioPlayer/SET_IS_PLAYING',
@ -272,6 +303,14 @@ export function reducer(
if (action.type === 'audioPlayer/SET_MESSAGE_AUDIO') {
const { payload } = action;
if (payload === undefined) {
return {
...state,
active: undefined,
};
}
const { playbackRate, startPosition, ...content } = payload;
return {
...state,
active:
@ -281,9 +320,9 @@ export function reducer(
currentTime: 0,
duration: undefined,
playing: true,
playbackRate: payload.playbackRate,
content: payload,
startPosition: payload.startPosition,
playbackRate,
content,
startPosition,
},
};
}
@ -363,6 +402,10 @@ export function reducer(
return state;
}
if (!AudioPlayerContent.isVoiceNote(content)) {
return state;
}
if (content.conversationId !== action.payload.conversationId) {
return state;
}
@ -436,6 +479,13 @@ export function reducer(
return state;
}
if (AudioPlayerContent.isDraft(content)) {
return {
...state,
active: undefined,
};
}
const { queue } = content;
const [nextVoiceNote, ...newQueue] = queue;
@ -475,6 +525,10 @@ export function reducer(
}
const { content } = active;
if (!AudioPlayerContent.isVoiceNote(content)) {
return state;
}
// if we deleted the message currently being played
// move on to the next message
if (content.current.id === id) {
@ -532,6 +586,10 @@ export function reducer(
return state;
}
if (AudioPlayerContent.isDraft(content)) {
return state;
}
const { id, data } = action.payload;
const { attachments } = data;

View file

@ -14,6 +14,7 @@ import { stringToMIMEType } from '../../types/MIME';
import type { BoundActionCreatorsMapObject } from '../../hooks/useBoundActions';
import { useBoundActions } from '../../hooks/useBoundActions';
import { getComposerStateForConversation } from './composer';
import * as Errors from '../../types/errors';
import {
ErrorDialogAudioRecorderType,
@ -73,8 +74,9 @@ export const actions = {
startRecording,
};
export const useActions = (): BoundActionCreatorsMapObject<typeof actions> =>
useBoundActions(actions);
export const useAudioRecorderActions = (): BoundActionCreatorsMapObject<
typeof actions
> => useBoundActions(actions);
function startRecording(
conversationId: string
@ -133,9 +135,9 @@ function completeRecordingAction(): CompleteRecordingAction {
};
}
function completeRecording(
export function completeRecording(
conversationId: string,
onSendAudioRecording?: (rec: InMemoryAttachmentDraftType) => unknown
onRecordingComplete: (rec: InMemoryAttachmentDraftType) => unknown
): ThunkAction<
void,
RootStateType,
@ -172,9 +174,7 @@ function completeRecording(
flags: Proto.AttachmentPointer.Flags.VOICE_MESSAGE,
};
if (onSendAudioRecording) {
onSendAudioRecording(voiceNoteAttachment);
}
onRecordingComplete(voiceNoteAttachment);
} finally {
dispatch(completeRecordingAction());
}

View file

@ -34,6 +34,7 @@ import {
REMOVE_PREVIEW as REMOVE_LINK_PREVIEW,
} from './linkPreviews';
import { LinkPreviewSourceType } from '../../types/LinkPreview';
import { completeRecording } from './audioRecorder';
import { RecordingState } from '../../types/AudioRecorder';
import { SHOW_TOAST } from './toast';
import { ToastType } from '../../types/Toast';
@ -333,6 +334,28 @@ function scrollToQuotedMessage({
};
}
/**
 * Invoked when the user navigates away from a conversation. If a voice note
 * is currently being recorded, finish the recording and stash the result as
 * a draft attachment on the conversation instead of discarding it.
 */
export function handleLeaveConversation(
  conversationId: string
): ThunkAction<void, RootStateType, unknown, never> {
  return (dispatch, getState) => {
    const { recordingState } = getState().audioRecorder;
    if (recordingState !== RecordingState.Recording) {
      return;
    }

    // save draft of voice note
    dispatch(
      completeRecording(conversationId, attachment => {
        // show the attachment immediately as pending, then add the real one
        dispatch(
          addPendingAttachment(conversationId, { ...attachment, pending: true })
        );
        dispatch(addAttachment(conversationId, attachment));
      })
    );
  };
}
function sendMultiMediaMessage(
conversationId: string,
options: {
@ -686,8 +709,23 @@ function addAttachment(
const conversation = window.ConversationController.get(conversationId);
if (conversation) {
conversation.attributes.draftAttachments = nextAttachments;
conversation.attributes.draftChanged = true;
conversation.set({
draftAttachments: nextAttachments,
draftChanged: true,
});
// if the conversation has already unloaded
if (!isSelectedConversation) {
const now = Date.now();
const activeAt = conversation.get('active_at') || now;
conversation.set({
active_at: activeAt,
draftChanged: false,
draftTimestamp: now,
timestamp: now,
});
}
window.Signal.Data.updateConversation(conversation.attributes);
}
};

View file

@ -136,15 +136,16 @@ import { UUIDKind } from '../../types/UUID';
import { removeLinkPreview } from '../../services/LinkPreview';
import type {
ReplaceAttachmentsActionType,
ResetComposerActionType,
SetFocusActionType,
SetQuotedMessageActionType,
ResetComposerActionType,
} from './composer';
import {
replaceAttachments,
setComposerFocus,
setQuoteByMessageId,
resetComposer,
handleLeaveConversation,
} from './composer';
import { ReceiptType } from '../../types/Receipt';
@ -3535,6 +3536,12 @@ function showConversation({
return;
}
// notify composer in case we need to stop recording a voice note
if (conversations.selectedConversationId) {
log.error('conversations - handleLeave');
dispatch(handleLeaveConversation(conversations.selectedConversationId));
}
dispatch({
type: SELECTED_CONVERSATION_CHANGED,
payload: {

View file

@ -7,7 +7,6 @@ import filesize from 'filesize';
import getDirection from 'direction';
import emojiRegex from 'emoji-regex';
import LinkifyIt from 'linkify-it';
import type { StateType } from '../reducer';
import type {
LastMessageStatus,

View file

@ -1,6 +1,7 @@
// Copyright 2019 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import React from 'react';
import { connect } from 'react-redux';
import { get } from 'lodash';
import { mapDispatchToProps } from '../actions';
@ -31,6 +32,10 @@ import {
} from '../selectors/stickers';
import { isSignalConversation } from '../../util/isSignalConversation';
import { getComposerStateForConversationIdSelector } from '../selectors/composer';
import type { SmartCompositionRecordingProps } from './CompositionRecording';
import { SmartCompositionRecording } from './CompositionRecording';
import type { SmartCompositionRecordingDraftProps } from './CompositionRecordingDraft';
import { SmartCompositionRecordingDraft } from './CompositionRecordingDraft';
type ExternalProps = {
id: string;
@ -145,6 +150,16 @@ const mapStateToProps = (state: StateType, props: ExternalProps) => {
draftText: dropNull(draftText),
draftBodyRanges,
renderSmartCompositionRecording: (
recProps: SmartCompositionRecordingProps
) => {
return <SmartCompositionRecording {...recProps} />;
},
renderSmartCompositionRecordingDraft: (
draftProps: SmartCompositionRecordingDraftProps
) => {
return <SmartCompositionRecordingDraft {...draftProps} />;
},
};
};

View file

@ -0,0 +1,59 @@
// Copyright 2022 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import React, { useCallback } from 'react';
import { useSelector } from 'react-redux';
import { CompositionRecording } from '../../components/CompositionRecording';
import { mapDispatchToProps } from '../actions';
import { useAudioRecorderActions } from '../ducks/audioRecorder';
import { useComposerActions } from '../ducks/composer';
import { getSelectedConversationId } from '../selectors/conversations';
import { getIntl } from '../selectors/user';
export type SmartCompositionRecordingProps = {
onBeforeSend: () => void;
};
/**
 * Connected wrapper around CompositionRecording: wires the in-progress
 * voice-note UI to the audio-recorder and composer actions for the selected
 * conversation. Renders nothing when no conversation is selected.
 */
export function SmartCompositionRecording({
  onBeforeSend,
}: SmartCompositionRecordingProps): JSX.Element | null {
  const i18n = useSelector(getIntl);
  const selectedConversationId = useSelector(getSelectedConversationId);

  const { cancelRecording, completeRecording } = useAudioRecorderActions();
  const { sendMultiMediaMessage } = useComposerActions();

  const onCancel = useCallback(() => {
    cancelRecording();
  }, [cancelRecording]);

  const onSend = useCallback(() => {
    if (!selectedConversationId) {
      return;
    }
    completeRecording(selectedConversationId, voiceNoteAttachment => {
      // let the caller tear down related UI before the message goes out
      onBeforeSend();
      sendMultiMediaMessage(selectedConversationId, { voiceNoteAttachment });
    });
  }, [
    completeRecording,
    onBeforeSend,
    selectedConversationId,
    sendMultiMediaMessage,
  ]);

  if (!selectedConversationId) {
    return null;
  }

  return (
    <CompositionRecording
      i18n={i18n}
      conversationId={selectedConversationId}
      onCancel={onCancel}
      onSend={onSend}
      errorRecording={mapDispatchToProps.errorRecording}
      addAttachment={mapDispatchToProps.addAttachment}
      completeRecording={mapDispatchToProps.completeRecording}
    />
  );
}

View file

@ -0,0 +1,156 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import React, { useCallback } from 'react';
import { useSelector } from 'react-redux';
import { CompositionRecordingDraft } from '../../components/CompositionRecordingDraft';
import type { AttachmentDraftType } from '../../types/Attachment';
import {
AudioPlayerContent,
useAudioPlayerActions,
} from '../ducks/audioPlayer';
import { useComposerActions } from '../ducks/composer';
import { selectAudioPlayerActive } from '../selectors/audioPlayer';
import {
getConversationByIdSelector,
getSelectedConversationId,
} from '../selectors/conversations';
import { getIntl } from '../selectors/user';
export type SmartCompositionRecordingDraftProps = {
voiceNoteAttachment: AttachmentDraftType;
};
/**
 * Renders the draft (already recorded, not yet sent) voice-note UI for the
 * selected conversation, wiring play/pause/scrub to the global audio player
 * and send/cancel to the composer.
 */
export function SmartCompositionRecordingDraft({
  voiceNoteAttachment,
}: SmartCompositionRecordingDraftProps): JSX.Element {
  const i18n = useSelector(getIntl);
  const active = useSelector(selectAudioPlayerActive);
  const selectedConversationId = useSelector(getSelectedConversationId);
  const getConversationById = useSelector(getConversationByIdSelector);
  const {
    loadVoiceNoteDraftAudio,
    unloadMessageAudio,
    setIsPlaying,
    setPosition,
  } = useAudioPlayerActions();
  const { sendMultiMediaMessage, removeAttachment } = useComposerActions();

  // NOTE(review): throws during render; presumably this component is only
  // mounted while a conversation is selected — confirm with callers.
  if (!selectedConversationId) {
    throw new Error('No selected conversation');
  }

  // Per-conversation playback rate, defaulting to normal speed.
  const playbackRate =
    getConversationById(selectedConversationId)?.voiceNotePlaybackRate ?? 1;

  // The draft has no playable URL while the attachment is still pending.
  const audioUrl = !voiceNoteAttachment.pending
    ? voiceNoteAttachment.url
    : undefined;

  const content = active?.content;

  // The global player state applies to us only when it holds a draft whose
  // URL matches this attachment; otherwise treat the player as inactive here.
  const draftActive =
    content && AudioPlayerContent.isDraft(content) && content.url === audioUrl
      ? active
      : undefined;

  const handlePlay = useCallback(
    (positionAsRatio?: number) => {
      // Not loaded yet: load the draft, starting at the requested position.
      if (!draftActive && audioUrl) {
        loadVoiceNoteDraftAudio({
          conversationId: selectedConversationId,
          url: audioUrl,
          startPosition: positionAsRatio ?? 0,
          playbackRate,
        });
      }
      // Already loaded: seek if a position was given, then resume if paused.
      if (draftActive) {
        if (positionAsRatio !== undefined) {
          setPosition(positionAsRatio);
        }
        if (!draftActive.playing) {
          setIsPlaying(true);
        }
      }
    },
    [
      draftActive,
      audioUrl,
      loadVoiceNoteDraftAudio,
      selectedConversationId,
      playbackRate,
      setPosition,
      setIsPlaying,
    ]
  );

  const handlePause = useCallback(() => {
    setIsPlaying(false);
  }, [setIsPlaying]);

  // Sends the draft attachment as a message in the selected conversation.
  const handleSend = useCallback(() => {
    if (selectedConversationId) {
      sendMultiMediaMessage(selectedConversationId, {
        draftAttachments: [voiceNoteAttachment],
      });
    }
  }, [selectedConversationId, sendMultiMediaMessage, voiceNoteAttachment]);

  // Discards the draft: unloads it from the player and removes the
  // attachment from the conversation's drafts.
  const handleCancel = useCallback(() => {
    unloadMessageAudio();

    if (selectedConversationId && voiceNoteAttachment.path) {
      removeAttachment(selectedConversationId, voiceNoteAttachment.path);
    }
  }, [
    removeAttachment,
    selectedConversationId,
    unloadMessageAudio,
    voiceNoteAttachment.path,
  ]);

  const handleScrub = useCallback(
    (positionAsRatio: number) => {
      // if scrubbing when audio not loaded
      if (!draftActive && audioUrl) {
        loadVoiceNoteDraftAudio({
          conversationId: selectedConversationId,
          url: audioUrl,
          startPosition: positionAsRatio,
          playbackRate,
        });
        return;
      }

      // if scrubbing when audio is loaded
      if (draftActive) {
        setPosition(positionAsRatio);
        if (draftActive?.playing) {
          setIsPlaying(true);
        }
      }
    },
    [
      audioUrl,
      draftActive,
      loadVoiceNoteDraftAudio,
      playbackRate,
      selectedConversationId,
      setIsPlaying,
      setPosition,
    ]
  );

  return (
    <CompositionRecordingDraft
      i18n={i18n}
      audioUrl={audioUrl}
      active={draftActive}
      onCancel={handleCancel}
      onSend={handleSend}
      onPlay={handlePlay}
      onPause={handlePause}
      onScrub={handleScrub}
    />
  );
}

View file

@ -6,7 +6,10 @@ import { useSelector } from 'react-redux';
import { MessageAudio } from '../../components/conversation/MessageAudio';
import type { OwnProps as MessageAudioOwnProps } from '../../components/conversation/MessageAudio';
import type { ActiveAudioPlayerStateType } from '../ducks/audioPlayer';
import { useAudioPlayerActions } from '../ducks/audioPlayer';
import {
AudioPlayerContent,
useAudioPlayerActions,
} from '../ducks/audioPlayer';
import {
selectAudioPlayerActive,
selectVoiceNoteAndConsecutive,
@ -14,6 +17,10 @@ import {
import { useConversationsActions } from '../ducks/conversations';
import { getUserConversationId } from '../selectors/user';
import * as log from '../../logging/log';
import {
getConversationByIdSelector,
getSelectedConversationId,
} from '../selectors/conversations';
export type Props = Omit<MessageAudioOwnProps, 'active' | 'onPlayMessage'> & {
renderingContext: string;
@ -24,18 +31,28 @@ export function SmartMessageAudio({
...props
}: Props): JSX.Element | null {
const active = useSelector(selectAudioPlayerActive);
const { loadMessageAudio, setIsPlaying, setPlaybackRate, setPosition } =
const { loadVoiceNoteAudio, setIsPlaying, setPlaybackRate, setPosition } =
useAudioPlayerActions();
const { pushPanelForConversation } = useConversationsActions();
const getVoiceNoteData = useSelector(selectVoiceNoteAndConsecutive);
const ourConversationId = useSelector(getUserConversationId);
const getConversationById = useSelector(getConversationByIdSelector);
const selectedConversationId = useSelector(getSelectedConversationId);
if (!selectedConversationId) {
throw new Error('No selected conversation');
}
const playbackRate =
getConversationById(selectedConversationId)?.voiceNotePlaybackRate ?? 1;
const content = active?.content;
const messageActive: ActiveAudioPlayerStateType | undefined =
active &&
active.content &&
active.content.current.id === props.id &&
active.content.context === renderingContext
content &&
AudioPlayerContent.isVoiceNote(content) &&
content.current.id === props.id &&
content.context === renderingContext
? active
: undefined;
@ -55,14 +72,21 @@ export function SmartMessageAudio({
return;
}
loadMessageAudio({
loadVoiceNoteAudio({
voiceNoteData,
position,
context: renderingContext,
ourConversationId,
playbackRate,
});
},
[getVoiceNoteData, loadMessageAudio, ourConversationId, renderingContext]
[
getVoiceNoteData,
loadVoiceNoteAudio,
ourConversationId,
renderingContext,
playbackRate,
]
);
return (

View file

@ -4,7 +4,10 @@
import React, { useCallback } from 'react';
import { useSelector } from 'react-redux';
import { MiniPlayer, PlayerState } from '../../components/MiniPlayer';
import { useAudioPlayerActions } from '../ducks/audioPlayer';
import {
AudioPlayerContent,
useAudioPlayerActions,
} from '../ducks/audioPlayer';
import {
selectAudioPlayerActive,
selectVoiceNoteTitle,
@ -30,15 +33,25 @@ export function SmartMiniPlayer(): JSX.Element | null {
return null;
}
const { content } = active;
const url = AudioPlayerContent.isVoiceNote(content)
? content.current.url
: content.url;
let state = PlayerState.loading;
if (active.content.current.url) {
if (url) {
state = active.playing ? PlayerState.playing : PlayerState.paused;
}
return (
<MiniPlayer
i18n={i18n}
title={getVoiceNoteTitle(active.content.current)}
title={
AudioPlayerContent.isDraft(content)
? i18n('you')
: getVoiceNoteTitle(content.current)
}
onPlay={handlePlay}
onPause={handlePause}
onPlaybackRate={setPlaybackRate}

View file

@ -6,7 +6,10 @@ import { useSelector } from 'react-redux';
import type { VoiceNotesPlaybackProps } from '../../components/VoiceNotesPlaybackContext';
import { VoiceNotesPlaybackProvider } from '../../components/VoiceNotesPlaybackContext';
import { selectAudioPlayerActive } from '../selectors/audioPlayer';
import { useAudioPlayerActions } from '../ducks/audioPlayer';
import {
AudioPlayerContent,
useAudioPlayerActions,
} from '../ducks/audioPlayer';
import { globalMessageAudio } from '../../services/globalMessageAudio';
import { strictAssert } from '../../util/assert';
import * as log from '../../logging/log';
@ -36,8 +39,22 @@ export function SmartVoiceNotesPlaybackProvider(
const previousStartPosition = usePrevious(undefined, active?.startPosition);
const content = active?.content;
const current = content?.current;
const url = current?.url;
let url: undefined | string;
let messageId: undefined | string;
let messageIdForLogging: undefined | string;
let playNextConsecutiveSound = false;
let playFinishConsecutiveSound = false;
if (content && AudioPlayerContent.isVoiceNote(content)) {
({ url, id: messageId } = content.current);
messageIdForLogging = content.current.messageIdForLogging;
playNextConsecutiveSound = content.isConsecutive;
playFinishConsecutiveSound =
content.isConsecutive && content.queue.length === 0;
}
if (content && AudioPlayerContent.isDraft(content)) {
url = content.url;
}
const {
messageAudioEnded,
@ -49,7 +66,7 @@ export function SmartVoiceNotesPlaybackProvider(
useEffect(() => {
// if we don't have a new audio source
// just control playback
if (!content || !current || !url || url === globalMessageAudio.url) {
if (!content || !url || url === globalMessageAudio.url) {
if (!active?.playing && globalMessageAudio.playing) {
globalMessageAudio.pause();
}
@ -65,71 +82,52 @@ export function SmartVoiceNotesPlaybackProvider(
if (
active &&
active.startPosition !== undefined &&
active.startPosition !== previousStartPosition
active.startPosition !== previousStartPosition &&
globalMessageAudio.duration !== undefined
) {
globalMessageAudio.currentTime =
active.startPosition * globalMessageAudio.duration;
}
if (!active?.playing && globalMessageAudio.playing) {
globalMessageAudio.pause();
}
if (active?.playing && !globalMessageAudio.playing) {
globalMessageAudio.play();
}
if (active && active.playbackRate !== globalMessageAudio.playbackRate) {
globalMessageAudio.playbackRate = active.playbackRate;
}
// if user requested a new position
if (
active &&
active.startPosition !== undefined &&
active.startPosition !== previousStartPosition &&
active.duration
) {
globalMessageAudio.currentTime = active.startPosition * active.duration;
}
return;
}
// otherwise we have a new audio source
// we just load it and play it
globalMessageAudio.load({
// if we have a new audio source
loadAudio({
url,
playbackRate: active.playbackRate,
onLoadedMetadata() {
strictAssert(
!Number.isNaN(globalMessageAudio.duration),
'Audio should have definite duration on `loadedmetadata` event'
);
log.info(
'SmartVoiceNotesPlaybackProvider: `loadedmetadata` event',
current.id
);
if (active.startPosition !== 0) {
globalMessageAudio.currentTime =
active.startPosition * globalMessageAudio.duration;
}
},
onDurationChange() {
log.info(
'SmartVoiceNotesPlaybackProvider: `durationchange` event',
current.id
);
const reportedDuration = globalMessageAudio.duration;
// the underlying Audio element can return NaN if the audio hasn't loaded
// we filter out 0 or NaN as they are not useful values downstream
const newDuration =
Number.isNaN(reportedDuration) || reportedDuration === 0
? undefined
: reportedDuration;
durationChanged(newDuration);
},
onTimeUpdate() {
currentTimeUpdated(globalMessageAudio.currentTime);
},
onEnded() {
if (content.isConsecutive && content.queue.length === 0) {
void stateChangeConfirmDownSound.play();
}
messageAudioEnded();
},
onError(error) {
log.error(
'SmartVoiceNotesPlaybackProvider: playback error',
current.messageIdForLogging,
Errors.toLogFormat(error)
);
unloadMessageAudio();
},
messageId,
messageIdForLogging,
startPosition: active.startPosition,
playFinishConsecutiveSound,
durationChanged,
unloadMessageAudio,
currentTimeUpdated,
messageAudioEnded,
});
// if this message was part of the queue (consecutive, added indirectly)
// we play a note to let the user we're onto a new message
// (false for the first message in a consecutive group, since the user initiated it)
if (content.isConsecutive) {
if (playNextConsecutiveSound) {
// eslint-disable-next-line more/no-then
void stateChangeConfirmUpSound.play().then(() => {
globalMessageAudio.play();
@ -138,17 +136,101 @@ export function SmartVoiceNotesPlaybackProvider(
globalMessageAudio.play();
}
if (!current.isPlayed) {
const message = conversations.messagesLookup[current.id];
if (message && message.seenStatus !== SeenStatus.Unseen) {
markViewed(current.id);
if (AudioPlayerContent.isVoiceNote(content)) {
if (!content.current.isPlayed) {
const message = conversations.messagesLookup[content.current.id];
if (message && message.seenStatus !== SeenStatus.Unseen) {
markViewed(content.current.id);
}
} else {
log.info('SmartVoiceNotesPlaybackProvider: message already played', {
message: content.current.messageIdForLogging,
});
}
} else {
log.info('SmartVoiceNotesPlaybackProvider: message already played', {
message: current.messageIdForLogging,
});
}
});
}, [
active,
content,
conversations.messagesLookup,
currentTimeUpdated,
durationChanged,
messageAudioEnded,
messageId,
messageIdForLogging,
playFinishConsecutiveSound,
playNextConsecutiveSound,
previousStartPosition,
unloadMessageAudio,
url,
]);
return <VoiceNotesPlaybackProvider {...props} />;
}
/**
 * Hand a new audio source to the global player and hook up all of its
 * lifecycle callbacks (metadata loaded, duration change, time updates,
 * playback end, and errors).
 */
function loadAudio({
  url,
  playbackRate,
  messageId,
  messageIdForLogging,
  startPosition,
  playFinishConsecutiveSound,
  durationChanged,
  currentTimeUpdated,
  messageAudioEnded,
  unloadMessageAudio,
}: {
  url: string;
  playbackRate: number;
  messageId: string | undefined;
  messageIdForLogging: string | undefined;
  startPosition: number;
  playFinishConsecutiveSound: boolean;
  durationChanged: (value: number | undefined) => void;
  currentTimeUpdated: (value: number) => void;
  messageAudioEnded: () => void;
  unloadMessageAudio: () => void;
}) {
  globalMessageAudio.load({
    url,
    playbackRate,

    onLoadedMetadata: () => {
      strictAssert(
        globalMessageAudio.duration !== undefined,
        'Audio should have definite duration on `loadedmetadata` event'
      );
      log.info(
        'SmartVoiceNotesPlaybackProvider: `loadedmetadata` event',
        messageId
      );
      // seek to the requested starting point now that the duration is known
      if (startPosition !== 0) {
        globalMessageAudio.currentTime =
          startPosition * globalMessageAudio.duration;
      }
      durationChanged(globalMessageAudio.duration);
    },

    onDurationChange: () => {
      log.info(
        'SmartVoiceNotesPlaybackProvider: `durationchange` event',
        messageId
      );
      durationChanged(globalMessageAudio.duration);
    },

    onTimeUpdate: () => {
      currentTimeUpdated(globalMessageAudio.currentTime);
    },

    onEnded: () => {
      // end of a consecutive run of voice notes: play the "finished" sound
      if (playFinishConsecutiveSound) {
        void stateChangeConfirmDownSound.play();
      }
      messageAudioEnded();
    },

    onError: error => {
      log.error(
        'SmartVoiceNotesPlaybackProvider: playback error',
        messageIdForLogging,
        Errors.toLogFormat(error)
      );
      unloadMessageAudio();
    },
  });
}

114
ts/state/smart/Waveform.tsx Normal file
View file

@ -0,0 +1,114 @@
// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import { noop } from 'lodash';
import React, { useEffect, useState } from 'react';
import { Waveform } from '../../components/conversation/Waveform';
import type { ComputePeaksResult } from '../../components/VoiceNotesPlaybackContext';
import { VoiceNotesPlaybackContext } from '../../components/VoiceNotesPlaybackContext';
import * as log from '../../logging/log';
const BAR_COUNT = 47;
type Props = {
// undefined if not downloaded yet
audioUrl: string | undefined;
computePeaks(url: string, barCount: number): Promise<ComputePeaksResult>;
duration: number | undefined;
onCorrupted: () => void;
barMinHeight: number;
barMaxHeight: number;
currentTime: number;
};
/**
 * Computes RMS peaks for the given audio URL (via the injected computePeaks)
 * and renders them as a waveform. Shows flat bars until the peaks arrive.
 */
function SmartWaveformImpl({
  audioUrl,
  barMinHeight,
  barMaxHeight,
  currentTime,
  duration: activeDuration,
  computePeaks,
  onCorrupted,
}: Props) {
  const [hasPeaks, setHasPeaks] = useState(false);
  const [peaks, setPeaks] = useState<ReadonlyArray<number>>(
    new Array(BAR_COUNT).fill(0)
  );
  // tiny epsilon keeps duration strictly positive for downstream math
  const [duration, setDuration] = useState(activeDuration ?? 1e-23);

  const isComputing = audioUrl && !hasPeaks;

  // This effect loads audio file and computes its RMS peak for displaying the
  // waveform.
  useEffect(() => {
    if (!isComputing) {
      return noop;
    }

    log.info('MessageAudio: loading audio and computing waveform');

    // guards against applying state from a stale run after unmount/re-run
    let canceled = false;

    const compute = async () => {
      try {
        const result = await computePeaks(audioUrl, BAR_COUNT);
        if (canceled) {
          return;
        }
        setPeaks(result.peaks);
        setHasPeaks(true);
        setDuration(Math.max(result.duration, 1e-23));
      } catch (err) {
        log.error(
          'MessageAudio: computePeaks error, marking as corrupted',
          err
        );
        onCorrupted();
      }
    };
    void compute();

    return () => {
      canceled = true;
    };
  }, [
    audioUrl,
    computePeaks,
    isComputing,
    onCorrupted,
    setDuration,
    setHasPeaks,
    setPeaks,
  ]);

  return (
    <Waveform
      peaks={peaks}
      barMinHeight={barMinHeight}
      barMaxHeight={barMaxHeight}
      duration={duration}
      currentTime={currentTime}
    />
  );
}
/**
 * Public wrapper: injects computePeaks from VoiceNotesPlaybackContext into
 * the waveform implementation. Renders nothing when the context is absent.
 */
export function SmartWaveform(props: Omit<Props, 'computePeaks'>): JSX.Element {
  return (
    <VoiceNotesPlaybackContext.Consumer>
      {playbackContext =>
        playbackContext && (
          <SmartWaveformImpl
            {...props}
            computePeaks={playbackContext.computePeaks}
          />
        )
      }
    </VoiceNotesPlaybackContext.Consumer>
  );
}