signal-desktop/ts/components/GroupCallRemoteParticipant.tsx

// Copyright 2020 Signal Messenger, LLC
// SPDX-License-Identifier: AGPL-3.0-only
import type { CSSProperties, ReactNode } from 'react';
import React, {
useState,
useRef,
useMemo,
useCallback,
useEffect,
} from 'react';
import classNames from 'classnames';
import { noop } from 'lodash';
import type { VideoFrameSource } from '@signalapp/ringrtc';
import type { GroupCallRemoteParticipantType } from '../types/Calling';
import type { LocalizerType } from '../types/Util';
import { AvatarColors } from '../types/Colors';
import { CallBackgroundBlur } from './CallBackgroundBlur';
import {
CallingAudioIndicator,
SPEAKING_LINGER_MS,
} from './CallingAudioIndicator';
import { Avatar, AvatarSize } from './Avatar';
import { ConfirmationDialog } from './ConfirmationDialog';
import { I18n } from './I18n';
import { ContactName } from './conversation/ContactName';
import { useIntersectionObserver } from '../hooks/useIntersectionObserver';
import { MAX_FRAME_HEIGHT, MAX_FRAME_WIDTH } from '../calling/constants';
import { useValueAtFixedRate } from '../hooks/useValueAtFixedRate';
import { Theme } from '../util/theme';
import { isOlderThan } from '../util/timestamp';
import type { CallingImageDataCache } from './CallManager';
import { usePrevious } from '../hooks/usePrevious';
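// How long we keep treating the last frame as fresh before showing the no-video state.
// Screen shares update much less often than camera video, so they get a longer window.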
const MAX_TIME_TO_SHOW_STALE_VIDEO_FRAMES = 10000;
const MAX_TIME_TO_SHOW_STALE_SCREENSHARE_FRAMES = 60000;
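// Grace period after a participant is added before we surface the "missing media keys" state.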
const DELAY_TO_SHOW_MISSING_MEDIA_KEYS = 5000;
type BasePropsType = {
getFrameBuffer: () => Buffer;
getGroupCallVideoFrameSource: (demuxId: number) => VideoFrameSource;
i18n: LocalizerType;
imageDataCache: React.RefObject<CallingImageDataCache>;
isActiveSpeakerInSpeakerView: boolean;
isCallReconnecting: boolean;
onClickRaisedHand?: () => void;
onVisibilityChanged?: (demuxId: number, isVisible: boolean) => unknown;
remoteParticipant: GroupCallRemoteParticipantType;
remoteParticipantsCount: number;
};
type InPipPropsType = {
isInPip: true;
};
type InOverflowAreaPropsType = {
height: number;
isInPip?: false;
audioLevel: number;
width: number;
};
type InGridPropsType = InOverflowAreaPropsType & {
left: number;
top: number;
};
export type PropsType = BasePropsType &
(InPipPropsType | InOverflowAreaPropsType | InGridPropsType);
export const GroupCallRemoteParticipant: React.FC<PropsType> = React.memo(
function GroupCallRemoteParticipantInner(props) {
const {
getFrameBuffer,
getGroupCallVideoFrameSource,
imageDataCache,
i18n,
onClickRaisedHand,
onVisibilityChanged,
remoteParticipantsCount,
isActiveSpeakerInSpeakerView,
isCallReconnecting,
} = props;
const {
acceptedMessageRequest,
addedTime,
avatarUrl,
color,
demuxId,
hasRemoteAudio,
hasRemoteVideo,
isHandRaised,
isBlocked,
isMe,
mediaKeysReceived,
profileName,
sharedGroupNames,
sharingScreen,
title,
videoAspectRatio,
} = props.remoteParticipant;
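// Derive a "speaking" flag from the audio level, smoothed via useValueAtFixedRate so
// the speaking indicator doesn't flicker on every audio-level update.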
const isSpeaking = useValueAtFixedRate(
!props.isInPip ? props.audioLevel > 0 : false,
SPEAKING_LINGER_MS
);
const previousSharingScreen = usePrevious(sharingScreen, sharingScreen);
const isImageDataCached =
sharingScreen && imageDataCache.current?.has(demuxId);
const [hasReceivedVideoRecently, setHasReceivedVideoRecently] =
useState(isImageDataCached);
const [isWide, setIsWide] = useState<boolean>(
videoAspectRatio ? videoAspectRatio >= 1 : true
);
const [showErrorDialog, setShowErrorDialog] = useState(false);
// We have some state (`hasReceivedVideoRecently`) and this ref. We can't have a
// single state value like `lastReceivedVideoAt` because (1) it won't automatically
// trigger a re-render after the video has become stale, and (2) it would cause a full
// re-render of the component for every frame, which is way too often.
//
// Alternatively, we could create a timeout that's reset every time we get a video
// frame (perhaps using a debounce function), but that becomes harder to clean up
// when the component unmounts.
const lastReceivedVideoAt = useRef(-Infinity);
const remoteVideoRef = useRef<HTMLCanvasElement | null>(null);
const canvasContextRef = useRef<CanvasRenderingContext2D | null>(null);
const imageDataRef = useRef<ImageData | null>(null);
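// Track whether this participant's tile is actually on screen; we skip rendering
// video for tiles that aren't visible.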
const [intersectionRef, intersectionObserverEntry] =
useIntersectionObserver();
const isVisible = intersectionObserverEntry
? intersectionObserverEntry.isIntersecting
: true;
useEffect(() => {
onVisibilityChanged?.(demuxId, isVisible);
}, [demuxId, isVisible, onVisibilityChanged]);
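// Starting or stopping screen share invalidates any cached image data for this
// demux ID.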
useEffect(() => {
if (sharingScreen !== previousSharingScreen) {
imageDataCache.current?.delete(demuxId);
}
}, [demuxId, imageDataCache, previousSharingScreen, sharingScreen]);
const wantsToShowVideo = hasRemoteVideo && !isBlocked && isVisible;
const hasVideoToShow = wantsToShowVideo && hasReceivedVideoRecently;
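// Only report missing media keys once the participant has been in the call longer
// than the grace period; before that we assume the keys are still on their way.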
const showMissingMediaKeys = Boolean(
!mediaKeysReceived &&
addedTime &&
isOlderThan(addedTime, DELAY_TO_SHOW_MISSING_MEDIA_KEYS)
);
const videoFrameSource = useMemo(
() => getGroupCallVideoFrameSource(demuxId),
[getGroupCallVideoFrameSource, demuxId]
);
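// Copy the latest frame for this participant out of the shared frame buffer and paint
// it onto the canvas, tracking freshness and aspect ratio as a side effect.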
const renderVideoFrame = useCallback(() => {
const frameAge = Date.now() - lastReceivedVideoAt.current;
const maxFrameAge = sharingScreen
? MAX_TIME_TO_SHOW_STALE_SCREENSHARE_FRAMES
: MAX_TIME_TO_SHOW_STALE_VIDEO_FRAMES;
if (frameAge > maxFrameAge) {
// We consider video from a remote participant "recent" relative to the last time we
// had a connection. If we lost their video because we're reconnecting, we still want
// to show the last frame of video (blurred out) until we have reconnected.
if (!isCallReconnecting) {
setHasReceivedVideoRecently(false);
}
}
const canvasEl = remoteVideoRef.current;
if (!canvasEl) {
return;
}
const canvasContext = canvasContextRef.current;
if (!canvasContext) {
return;
}
// This frame buffer is shared by all participants, so it may contain pixel data
// for other participants, or pixel data from a previous frame. That's why we
// return early and use the `frameWidth` and `frameHeight`.
let frameWidth: number | undefined;
let frameHeight: number | undefined;
let imageData = imageDataRef.current;
const frameBuffer = getFrameBuffer();
const frameDimensions = videoFrameSource.receiveVideoFrame(
frameBuffer,
MAX_FRAME_WIDTH,
MAX_FRAME_HEIGHT
);
if (frameDimensions) {
[frameWidth, frameHeight] = frameDimensions;
if (
frameWidth < 2 ||
frameHeight < 2 ||
frameWidth > MAX_FRAME_WIDTH ||
frameHeight > MAX_FRAME_HEIGHT
) {
return;
}
if (
imageData?.width !== frameWidth ||
imageData?.height !== frameHeight
) {
imageData = new ImageData(frameWidth, frameHeight);
imageDataRef.current = imageData;
}
imageData.data.set(
frameBuffer.subarray(0, frameWidth * frameHeight * 4)
);
// Screen share is at a slow FPS so updates slowly if we PiP then restore.
// Cache the image data so we can quickly show the most recent frame.
if (sharingScreen) {
imageDataCache.current?.set(demuxId, imageData);
}
} else if (sharingScreen && !imageData) {
// Try to use the screenshare cache the first time we show this screen share
const cachedImageData = imageDataCache.current?.get(demuxId);
if (cachedImageData) {
frameWidth = cachedImageData.width;
frameHeight = cachedImageData.height;
imageDataRef.current = cachedImageData;
imageData = cachedImageData;
}
}
if (!frameWidth || !frameHeight || !imageData) {
return;
}
canvasEl.width = frameWidth;
canvasEl.height = frameHeight;
canvasContext.putImageData(imageData, 0, 0);
lastReceivedVideoAt.current = Date.now();
setHasReceivedVideoRecently(true);
setIsWide(frameWidth > frameHeight);
}, [
demuxId,
imageDataCache,
isCallReconnecting,
sharingScreen,
videoFrameSource,
getFrameBuffer,
]);
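// When the participant turns their video off, stop treating old frames as fresh.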
useEffect(() => {
if (!hasRemoteVideo) {
setHasReceivedVideoRecently(false);
}
}, [hasRemoteVideo]);
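// Drive rendering from requestAnimationFrame, but only while the participant has video
// enabled and their tile is visible; otherwise tear the loop down.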
useEffect(() => {
if (!hasRemoteVideo || !isVisible) {
return noop;
}
let rafId = requestAnimationFrame(tick);
function tick() {
renderVideoFrame();
rafId = requestAnimationFrame(tick);
}
return () => {
cancelAnimationFrame(rafId);
};
}, [hasRemoteVideo, isVisible, renderVideoFrame, videoFrameSource]);
let canvasStyles: CSSProperties;
let containerStyles: CSSProperties;
// If our `width` and `height` props don't match the canvas's aspect ratio, we want to
// fill the container. This can happen when RingRTC gives us an inaccurate
// `videoAspectRatio`, or if the container is an unexpected size.
if (isWide) {
canvasStyles = { width: '100%' };
} else {
canvasStyles = { height: '100%' };
}
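// Size the avatar for the no-video state based on the space we have, and absolutely
// position grid tiles when explicit coordinates are provided.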
let avatarSize: number;
let footerInfoElement: ReactNode;
if (props.isInPip) {
containerStyles = canvasStyles;
avatarSize = AvatarSize.FORTY_EIGHT;
} else {
const { width, height } = props;
const shorterDimension = Math.min(width, height);
if (shorterDimension >= 180) {
avatarSize = AvatarSize.NINETY_SIX;
} else {
avatarSize = AvatarSize.FORTY_EIGHT;
}
containerStyles = {
height,
width,
};
if ('top' in props) {
containerStyles.position = 'absolute';
2023-11-08 21:43:08 +00:00
containerStyles.insetInlineStart = `${props.left}px`;
containerStyles.top = `${props.top}px`;
}
const nameElement = (
<ContactName
module="module-ongoing-call__group-call-remote-participant__info__contact-name"
title={title}
/>
);
if (isHandRaised) {
footerInfoElement = (
<button
className="module-ongoing-call__group-call-remote-participant__info module-ongoing-call__group-call-remote-participant__info--clickable"
onClick={onClickRaisedHand}
type="button"
>
<div className="CallingStatusIndicator CallingStatusIndicator--HandRaised" />
{nameElement}
</button>
);
} else {
footerInfoElement = (
<div className="module-ongoing-call__group-call-remote-participant__info">
{nameElement}
</div>
);
}
}
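// When there's no video to show, fall back to an avatar, or to an error state for
// blocked participants and participants whose media keys haven't arrived.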
let noVideoNode: ReactNode;
if (!hasVideoToShow) {
const showDialogButton = (
<button
type="button"
className="module-ongoing-call__group-call-remote-participant__more-info"
onClick={() => {
setShowErrorDialog(true);
}}
>
{i18n('icu:moreInfo')}
</button>
);
if (isBlocked) {
noVideoNode = (
<>
<i className="module-ongoing-call__group-call-remote-participant__error-icon module-ongoing-call__group-call-remote-participant__error-icon--blocked" />
<div className="module-ongoing-call__group-call-remote-participant__error">
{i18n('icu:calling__blocked-participant', { name: title })}
</div>
{showDialogButton}
</>
);
} else if (showMissingMediaKeys) {
noVideoNode = (
<>
<i className="module-ongoing-call__group-call-remote-participant__error-icon module-ongoing-call__group-call-remote-participant__error-icon--missing-media-keys" />
<div className="module-ongoing-call__group-call-remote-participant__error">
{i18n('icu:calling__missing-media-keys', { name: title })}
</div>
{showDialogButton}
</>
);
} else {
noVideoNode = (
<Avatar
acceptedMessageRequest={acceptedMessageRequest}
avatarUrl={avatarUrl}
badge={undefined}
color={color || AvatarColors[0]}
noteToSelf={false}
conversationType="direct"
i18n={i18n}
isMe={isMe}
profileName={profileName}
title={title}
sharedGroupNames={sharedGroupNames}
size={avatarSize}
/>
);
}
}
// Error dialog maintains state, so if you have it open and the underlying
// error changes or resolves, you can keep reading the same dialog info.
const [errorDialogTitle, setErrorDialogTitle] = useState<ReactNode | null>(
null
);
const [errorDialogBody, setErrorDialogBody] = useState<string>('');
useEffect(() => {
if (hasVideoToShow || showErrorDialog) {
return;
}
if (isBlocked) {
setErrorDialogTitle(
<div className="module-ongoing-call__group-call-remote-participant__more-info-modal-title">
<I18n
i18n={i18n}
id="icu:calling__block-info-title"
components={{
name: <ContactName key="name" title={title} />,
}}
/>
</div>
);
setErrorDialogBody(i18n('icu:calling__block-info'));
} else if (showMissingMediaKeys) {
setErrorDialogTitle(
<div className="module-ongoing-call__group-call-remote-participant__more-info-modal-title">
<I18n
i18n={i18n}
id="icu:calling__missing-media-keys"
components={{
name: <ContactName key="name" title={title} />,
}}
/>
</div>
);
setErrorDialogBody(i18n('icu:calling__missing-media-keys-info'));
} else {
setErrorDialogTitle(null);
setErrorDialogBody('');
}
}, [
hasVideoToShow,
i18n,
isBlocked,
showErrorDialog,
showMissingMediaKeys,
title,
]);
return (
<>
{showErrorDialog && (
<ConfirmationDialog
dialogName="GroupCallRemoteParticipant.blockInfo"
cancelText={i18n('icu:ok')}
i18n={i18n}
onClose={() => setShowErrorDialog(false)}
theme={Theme.Dark}
title={errorDialogTitle}
>
{errorDialogBody}
</ConfirmationDialog>
)}
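{/* Highlight the tile while this participant is speaking, unless they're already the
    active speaker in speaker view or the only other participant in the call. */}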
<div
className={classNames(
'module-ongoing-call__group-call-remote-participant',
isSpeaking &&
!isActiveSpeakerInSpeakerView &&
remoteParticipantsCount > 1 &&
'module-ongoing-call__group-call-remote-participant--speaking',
isHandRaised &&
'module-ongoing-call__group-call-remote-participant--hand-raised'
)}
ref={intersectionRef}
style={containerStyles}
>
{!props.isInPip && (
<>
<CallingAudioIndicator
hasAudio={hasRemoteAudio}
audioLevel={props.audioLevel}
shouldShowSpeaking={isSpeaking}
/>
<div className="module-ongoing-call__group-call-remote-participant__footer">
{footerInfoElement}
</div>
</>
)}
{wantsToShowVideo && (
<canvas
className={classNames(
'module-ongoing-call__group-call-remote-participant__remote-video',
isCallReconnecting &&
'module-ongoing-call__group-call-remote-participant__remote-video--reconnecting'
)}
style={{
...canvasStyles,
// If we want to show video but don't have any yet, we still render the
// canvas invisibly. This lets us render frame data immediately without
// having to juggle anything.
...(hasVideoToShow ? {} : { display: 'none' }),
}}
ref={canvasEl => {
remoteVideoRef.current = canvasEl;
if (canvasEl) {
canvasContextRef.current = canvasEl.getContext('2d');
} else {
canvasContextRef.current = null;
}
}}
/>
)}
{noVideoNode && (
<CallBackgroundBlur
avatarUrl={isBlocked ? undefined : avatarUrl}
className="module-ongoing-call__group-call-remote-participant-background"
>
{noVideoNode}
</CallBackgroundBlur>
)}
</div>
</>
);
}
);