// Copyright 2023 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only

import { noop } from 'lodash';
import React, { useCallback, useEffect, useRef, useState } from 'react';

import { useEscapeHandling } from '../hooks/useEscapeHandling';
import { usePrevious } from '../hooks/usePrevious';
import type { InMemoryAttachmentDraftType } from '../types/Attachment';
import { ErrorDialogAudioRecorderType } from '../types/AudioRecorder';
import type { LocalizerType } from '../types/Util';
import { DurationInSeconds, SECOND } from '../util/durations';
import { durationToPlaybackText } from '../util/durationToPlaybackText';
import { ConfirmationDialog } from './ConfirmationDialog';
import { RecordingComposer } from './RecordingComposer';
import { ToastVoiceNoteLimit } from './ToastVoiceNoteLimit';

export type Props = {
  i18n: LocalizerType;
  conversationId: string;
  onCancel: () => void;
  onSend: () => void;
  errorRecording: (e: ErrorDialogAudioRecorderType) => unknown;
  errorDialogAudioRecorderType?: ErrorDialogAudioRecorderType;
  addAttachment: (
    conversationId: string,
    attachment: InMemoryAttachmentDraftType
  ) => unknown;
  completeRecording: (
    conversationId: string,
    onRecordingComplete: (rec: InMemoryAttachmentDraftType) => unknown
  ) => unknown;
};

export function CompositionRecording({
  i18n,
  conversationId,
  onCancel,
  onSend,
  errorRecording,
  errorDialogAudioRecorderType,
  addAttachment,
  completeRecording,
}: Props): JSX.Element {
  useEscapeHandling(onCancel);

  const [showVoiceNoteLimitToast, setShowVoiceNoteLimitToast] = useState(true);

  // when interrupted (blur, switching convos)
  // stop recording and save the captured audio as a draft attachment
  const handleRecordingInterruption = useCallback(() => {
    completeRecording(conversationId, attachment => {
      addAttachment(conversationId, attachment);
    });
  }, [conversationId, completeRecording, addAttachment]);

  // switched to another app
  useEffect(() => {
    window.addEventListener('blur', handleRecordingInterruption);
    return () => {
      window.removeEventListener('blur', handleRecordingInterruption);
    };
  }, [handleRecordingInterruption]);

  // switched conversations
  const previousConversationId = usePrevious(conversationId, conversationId);
  useEffect(() => {
    if (previousConversationId !== conversationId) {
      handleRecordingInterruption();
    }
  });

  const handleCloseToast = useCallback(() => {
    setShowVoiceNoteLimitToast(false);
  }, []);

  // hide the voice-note-limit toast when this component unmounts
  useEffect(() => {
    return () => {
      handleCloseToast();
    };
  }, [handleCloseToast]);

  const startTime = useRef(Date.now());
  const [duration, setDuration] = useState(0);
  const drift = useRef(0);

  // update recording duration roughly once per second, compensating for timer
  // drift, and report a timeout once the recording reaches one hour
  useEffect(() => {
    const timeoutId = setTimeout(() => {
      const now = Date.now();
      const newDurationMs = now - startTime.current;
      drift.current = newDurationMs % SECOND;
      setDuration(newDurationMs / SECOND);

      if (
        DurationInSeconds.fromMillis(newDurationMs) >= DurationInSeconds.HOUR
      ) {
        errorRecording(ErrorDialogAudioRecorderType.Timeout);
      }
    }, SECOND - drift.current);

    return () => {
      clearTimeout(timeoutId);
    };
  }, [duration, errorRecording]);

  let confirmationDialog: JSX.Element | undefined;
  if (errorDialogAudioRecorderType === ErrorDialogAudioRecorderType.Timeout) {
    // recording hit the maximum length: let the user send what was captured
    // so far or discard it
    confirmationDialog = (
      <ConfirmationDialog
        dialogName="AudioCapture.sendAnyway"
        i18n={i18n}
        onCancel={onCancel}
        onClose={noop}
        cancelText={i18n('icu:discard')}
        actions={[
          {
            text: i18n('icu:sendAnyway'),
            style: 'affirmative',
            action: onSend,
          },
        ]}
      >
        {i18n('icu:voiceRecordingInterruptedMax')}
      </ConfirmationDialog>
    );
  } else if (
    errorDialogAudioRecorderType === ErrorDialogAudioRecorderType.ErrorRecording
  ) {
    // recording failed: surface the error and cancel the voice note
    confirmationDialog = (
      <ConfirmationDialog
        dialogName="AudioCapture.error"
        i18n={i18n}
        onCancel={onCancel}
        onClose={noop}
        cancelText={i18n('icu:ok')}
        actions={[]}
      >
        {i18n('icu:voiceNoteError')}
      </ConfirmationDialog>
    );
  }

  return (
    <RecordingComposer i18n={i18n} onCancel={onCancel} onSend={onSend}>
      <div className="CompositionRecording__microphone" />
      <div className="CompositionRecording__timer">
        {durationToPlaybackText(duration)}
      </div>

      {confirmationDialog}

      {showVoiceNoteLimitToast && (
        <ToastVoiceNoteLimit i18n={i18n} onClose={handleCloseToast} />
      )}
    </RecordingComposer>
  );
}
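
/*
 * A minimal usage sketch, assuming a parent composition area that owns the
 * recorder state. `VoiceNoteArea`, `useVoiceNoteRecorder`, and
 * `saveDraftAttachment` are hypothetical stand-ins for illustration only;
 * they are not part of this module or of the app's actual wiring.
 *
 *   function VoiceNoteArea(props: {
 *     i18n: LocalizerType;
 *     conversationId: string;
 *   }): JSX.Element {
 *     // hypothetical hook that wraps the audio recorder service
 *     const recorder = useVoiceNoteRecorder();
 *     return (
 *       <CompositionRecording
 *         i18n={props.i18n}
 *         conversationId={props.conversationId}
 *         onCancel={recorder.cancel}
 *         onSend={recorder.send}
 *         errorRecording={recorder.reportError}
 *         errorDialogAudioRecorderType={recorder.errorType}
 *         addAttachment={saveDraftAttachment}
 *         completeRecording={recorder.complete}
 *       />
 *     );
 *   }
 */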