center avatar in draggable video window and handle video mute events

This commit is contained in:
Audric Ackermann 2021-10-18 14:45:40 +11:00
parent ecceaeaa8f
commit 678a5bcb3b
No known key found for this signature in database
GPG Key ID: 999F434D76324AD4
3 changed files with 119 additions and 44 deletions

View File

@ -13,6 +13,8 @@ import {
getSelectedConversationKey,
} from '../../../state/selectors/conversations';
import { openConversationWithMessages } from '../../../state/ducks/conversations';
import { Avatar, AvatarSize } from '../../Avatar';
import { getConversationController } from '../../../session/conversations';
export const DraggableCallWindow = styled.div`
position: absolute;
@ -26,10 +28,11 @@ export const DraggableCallWindow = styled.div`
border: var(--session-border);
`;
export const StyledVideoElement = styled.video`
export const StyledVideoElement = styled.video<{ isRemoteVideoMuted: boolean }>`
padding: 0 1rem;
height: 100%;
width: 100%;
opacity: ${props => (props.isRemoteVideoMuted ? 0 : 1)};
`;
const StyledDraggableVideoElement = styled(StyledVideoElement)`
@ -40,6 +43,20 @@ const DraggableCallWindowInner = styled.div`
cursor: pointer;
`;
const CenteredAvatarInDraggable = styled.div`
position: absolute;
width: 100%;
top: 0;
bottom: 0;
left: 0;
right: 50%;
min-height: 85px;
min-width: 85px;
display: flex;
justify-content: center;
align-items: center;
`;
// TODO:
/**
* Add mute input, deafen, end call, possibly add person to call
@ -54,6 +71,7 @@ export const DraggableCallContainer = () => {
const [positionY, setPositionY] = useState(window.innerHeight / 2);
const [lastPositionX, setLastPositionX] = useState(0);
const [lastPositionY, setLastPositionY] = useState(0);
const [isRemoteVideoMuted, setIsRemoteVideoMuted] = useState(true);
const ongoingCallPubkey = ongoingCallProps?.id;
const videoRefRemote = useRef<any>(undefined);
@ -77,9 +95,16 @@ export const DraggableCallContainer = () => {
useEffect(() => {
if (ongoingCallPubkey !== selectedConversationKey) {
CallManager.setVideoEventsListener(
(_localStream: MediaStream | null, remoteStream: MediaStream | null) => {
(
_localStream: MediaStream | null,
remoteStream: MediaStream | null,
_camerasList: any,
_audioList: any,
remoteVideoIsMuted: boolean
) => {
if (mountedState() && videoRefRemote?.current) {
videoRefRemote.current.srcObject = remoteStream;
setIsRemoteVideoMuted(remoteVideoIsMuted);
}
}
);
@ -99,6 +124,13 @@ export const DraggableCallContainer = () => {
if (!hasOngoingCall || !ongoingCallProps || ongoingCallPubkey === selectedConversationKey) {
return null;
}
const ongoingCallUsername = ongoingCallProps?.profileName || ongoingCallProps?.name;
const avatarPath = ongoingCallPubkey
? getConversationController()
.get(ongoingCallPubkey)
.getAvatarPath()
: undefined;
return (
<Draggable
@ -120,7 +152,21 @@ export const DraggableCallContainer = () => {
>
<DraggableCallWindow className="dragHandle">
<DraggableCallWindowInner>
<StyledDraggableVideoElement ref={videoRefRemote} autoPlay={true} />
<StyledDraggableVideoElement
ref={videoRefRemote}
autoPlay={true}
isRemoteVideoMuted={isRemoteVideoMuted}
/>
{isRemoteVideoMuted && (
<CenteredAvatarInDraggable>
<Avatar
size={AvatarSize.XL}
avatarPath={avatarPath}
name={ongoingCallUsername}
pubkey={ongoingCallPubkey}
/>
</CenteredAvatarInDraggable>
)}
</DraggableCallWindowInner>
</DraggableCallWindow>
</Draggable>

View File

@ -123,9 +123,8 @@ const AudioInputMenu = ({
);
};
const CenteredAvatar = styled.div`
const CenteredAvatarInConversation = styled.div`
position: absolute;
top: 0;
bottom: 0;
left: 0;
@ -172,13 +171,12 @@ export const InConversationCallContainer = () => {
localStream: MediaStream | null,
remoteStream: MediaStream | null,
camerasList: Array<InputItem>,
audioInputList: Array<InputItem>
audioInputList: Array<InputItem>,
isRemoteVideoStreamMuted: boolean
) => {
if (mountedState() && videoRefRemote?.current && videoRefLocal?.current) {
videoRefLocal.current.srcObject = localStream;
setIsRemoteVideoMuted(
Boolean(remoteStream?.getTracks().find(t => t.kind === 'video')?.muted)
);
setIsRemoteVideoMuted(isRemoteVideoStreamMuted);
videoRefRemote.current.srcObject = remoteStream;
setCurrentConnectedCameras(camerasList);
@ -262,20 +260,29 @@ export const InConversationCallContainer = () => {
<InConvoCallWindow>
<RelativeCallWindow>
<VideoContainer>
<StyledVideoElement ref={videoRefRemote} autoPlay={true} />
{isRemoteVideoMuted && ongoingCallPubkey && (
<CenteredAvatar>
<StyledVideoElement
ref={videoRefRemote}
autoPlay={true}
isRemoteVideoMuted={isRemoteVideoMuted}
/>
{isRemoteVideoMuted && (
<CenteredAvatarInConversation>
<Avatar
size={AvatarSize.XL}
avatarPath={avatarPath}
name={ongoingCallUsername}
pubkey={ongoingCallPubkey}
/>
</CenteredAvatar>
</CenteredAvatarInConversation>
)}
</VideoContainer>
<VideoContainer>
<StyledVideoElement ref={videoRefLocal} autoPlay={true} muted={true} />
<StyledVideoElement
ref={videoRefLocal}
autoPlay={true}
muted={true}
isRemoteVideoMuted={false}
/>
</VideoContainer>
<InConvoCallWindowControls>

View File

@ -17,6 +17,7 @@ import { CallMessage } from '../messages/outgoing/controlMessage/CallMessage';
import { ed25519Str } from '../onions/onionPath';
import { getMessageQueue } from '../sending';
import { PubKey } from '../types';
export type InputItem = { deviceId: string; label: string };
// const VIDEO_WIDTH = 640;
@ -27,14 +28,21 @@ type CallManagerListener =
localStream: MediaStream | null,
remoteStream: MediaStream | null,
camerasList: Array<InputItem>,
audioInputsList: Array<InputItem>
audioInputsList: Array<InputItem>,
isRemoteVideoStreamMuted: boolean
) => void)
| null;
let videoEventsListener: CallManagerListener;
function callVideoListener() {
if (videoEventsListener) {
videoEventsListener(mediaDevices, remoteStream, camerasList, audioInputsList);
videoEventsListener(
mediaDevices,
remoteStream,
camerasList,
audioInputsList,
remoteVideoStreamIsMuted
);
}
}
@ -52,6 +60,8 @@ let peerConnection: RTCPeerConnection | null;
let dataChannel: RTCDataChannel | null;
let remoteStream: MediaStream | null;
let mediaDevices: MediaStream | null;
let remoteVideoStreamIsMuted = true;
export const INPUT_DISABLED_DEVICE_ID = 'off';
let makingOffer = false;
@ -108,6 +118,15 @@ async function updateInputLists() {
}));
}
/**
 * Tells the remote peer, over the call's RTCDataChannel, whether our camera
 * is currently streaming. The payload is a JSON object of the shape
 * `{ video: boolean }`; the other side uses it to show/hide our video tile.
 */
function sendVideoStatusViaDataChannel() {
  // Video counts as enabled only when a camera has been selected and it is
  // not the explicit "off" pseudo-device.
  const cameraIsOn =
    selectedCameraId !== undefined && selectedCameraId !== INPUT_DISABLED_DEVICE_ID;
  const payload = JSON.stringify({ video: cameraIsOn });
  // dataChannel may not exist yet (or may already be closed); send is best-effort.
  dataChannel?.send(payload);
}
export async function selectCameraByDeviceId(cameraDeviceId: string) {
if (cameraDeviceId === INPUT_DISABLED_DEVICE_ID) {
selectedCameraId = cameraDeviceId;
@ -118,6 +137,7 @@ export async function selectCameraByDeviceId(cameraDeviceId: string) {
if (sender?.track) {
sender.track.enabled = false;
}
sendVideoStatusViaDataChannel();
return;
}
if (camerasList.some(m => m.deviceId === cameraDeviceId)) {
@ -146,6 +166,7 @@ export async function selectCameraByDeviceId(cameraDeviceId: string) {
mediaDevices?.removeTrack(t);
});
mediaDevices?.addTrack(videoTrack);
sendVideoStatusViaDataChannel();
} else {
throw new Error('Failed to get sender for selectCameraByDeviceId ');
}
@ -205,6 +226,9 @@ async function handleNegotiationNeededEvent(_event: Event, recipient: string) {
offerToReceiveAudio: true,
offerToReceiveVideo: true,
});
if (!offer) {
throw new Error('Could not create an offer');
}
await peerConnection?.setLocalDescription(offer);
if (offer && offer.sdp) {
@ -394,10 +418,28 @@ function closeVideoCall() {
mediaDevices = null;
remoteStream = null;
if (videoEventsListener) {
videoEventsListener(null, null, [], []);
videoEventsListener(null, null, [], [], true);
}
}
/**
 * Handles a control message received on the call's RTCDataChannel.
 * The only field currently understood is `video` (a boolean the remote side
 * sends from its own sendVideoStatusViaDataChannel), which drives the
 * module-level `remoteVideoStreamIsMuted` flag and so the avatar overlay.
 *
 * @param ev the datachannel message event; `ev.data` is expected to be JSON.
 */
function onDataChannelReceivedMessage(ev: MessageEvent<string>) {
  try {
    const parsed = JSON.parse(ev.data);
    if (parsed.video !== undefined) {
      // Remote video is muted whenever the peer reports `video: false`.
      remoteVideoStreamIsMuted = !parsed.video;
    }
    // Notify whichever UI component is currently registered as listener.
    callVideoListener();
  } catch (e) {
    // Include the parse error itself so malformed payloads can be diagnosed.
    window.log.warn('onDataChannelReceivedMessage Could not parse data in event', ev, e);
  }
}
/**
 * Invoked once the RTCDataChannel becomes usable (or is first attached):
 * immediately advertises our current camera on/off state to the peer so it
 * can render our tile correctly from the start.
 */
function onDataChannelOnOpen() {
  window.log.info('onDataChannelOnOpen: sending video status');
  sendVideoStatusViaDataChannel();
}
function createOrGetPeerConnection(withPubkey: string, createDataChannel: boolean) {
if (peerConnection) {
return peerConnection;
@ -412,41 +454,21 @@ function createOrGetPeerConnection(withPubkey: string, createDataChannel: boolea
peerConnection.ondatachannel = e => {
if (!createDataChannel) {
dataChannel = e.channel;
console.warn('ondatachannel');
window.log.info('Got our datachannel setup');
setInterval(() => {
console.warn('ondatachannel: sending yoooooo');
onDataChannelOnOpen();
dataChannel?.send('yooooooooooooooo: ' + Date.now());
}, 1000);
dataChannel.onmessage = e => {
console.warn('ondatachannel: datachannel on message', e);
};
dataChannel.onmessage = onDataChannelReceivedMessage;
}
};
if (createDataChannel) {
console.warn('createOrGetPeerConnection: createDataChannel');
// console.warn('createOrGetPeerConnection: createDataChannel');
dataChannel = peerConnection.createDataChannel('session-datachannel');
dataChannel.onmessage = e => {
console.warn('createDataChannel: datachannel on message', e);
};
dataChannel.onopen = () => {
window.log.info('onopen of datachannel');
const videoEnabledLocally =
selectedCameraId !== undefined && selectedCameraId !== INPUT_DISABLED_DEVICE_ID;
dataChannel?.send(
JSON.stringify({
video: videoEnabledLocally,
})
);
};
dataChannel.onclose = () => {
window.log.info('onclose of datachannel');
};
dataChannel.onmessage = onDataChannelReceivedMessage;
dataChannel.onopen = onDataChannelOnOpen;
}
peerConnection.onsignalingstatechange = handleSignalingStateChangeEvent;
@ -563,7 +585,7 @@ export function handleCallTypeEndCall(sender: string) {
window.log.info('handling callMessage END_CALL');
if (videoEventsListener) {
videoEventsListener(null, null, [], []);
videoEventsListener(null, null, [], [], true);
}
closeVideoCall();
//