Merge pull request #2024 from Bilb/setup-test-playwright

WebRTC calls caller UI + Playwright setup
Audric Ackermann 2021-11-12 16:13:37 +11:00 committed by GitHub
commit af7cdfc4b4
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
68 changed files with 3069 additions and 3548 deletions

View File

@ -1,30 +0,0 @@
#!/bin/sh
if [ -z "$husky_skip_init" ]; then
debug () {
[ "$HUSKY_DEBUG" = "1" ] && echo "husky (debug) - $1"
}
readonly hook_name="$(basename "$0")"
debug "starting $hook_name..."
if [ "$HUSKY" = "0" ]; then
debug "HUSKY env variable is set to 0, skipping hook"
exit 0
fi
if [ -f ~/.huskyrc ]; then
debug "sourcing ~/.huskyrc"
. ~/.huskyrc
fi
export readonly husky_skip_init=1
sh -e "$0" "$@"
exitCode="$?"
if [ $exitCode != 0 ]; then
echo "husky - $hook_name hook exited with code $exitCode (error)"
exit $exitCode
fi
exit 0
fi

View File

@ -149,8 +149,8 @@
"linkPreviewsTitle": "Send Link Previews",
"linkPreviewDescription": "Previews are supported for most urls",
"linkPreviewsConfirmMessage": "You will not have full metadata protection when sending link previews.",
"mediaPermissionsTitle": "Microphone and Camera",
"mediaPermissionsDescription": "Allow access to camera and microphone",
"mediaPermissionsTitle": "Microphone",
"mediaPermissionsDescription": "Allow access to microphone",
"spellCheckTitle": "Spell Check",
"spellCheckDescription": "Enable spell check of text entered in message composition box",
"spellCheckDirty": "You must restart Session to apply your new settings",
@ -436,20 +436,24 @@
"notificationSubtitle": "Notifications - $setting$",
"surveyTitle": "Take our Session Survey",
"goToOurSurvey": "Go to our survey",
"incomingCall": "Incoming call",
"incomingCallFrom": "Incoming call from '$name$'",
"ringing": "Ringing...",
"establishingConnection": "Establishing connection...",
"accept": "Accept",
"decline": "Decline",
"endCall": "End call",
"micAndCameraPermissionNeededTitle": "Camera and Microphone access required",
"micAndCameraPermissionNeeded": "You can enable microphone and camera access under: Settings (Gear icon) => Privacy",
"cameraPermissionNeededTitle": "Voice/Video Call permissions required",
"cameraPermissionNeeded": "You can enable the 'Voice and video calls' permission in the Privacy Settings.",
"unableToCall": "cancel your ongoing call first",
"unableToCallTitle": "Cannot start new call",
"callMissed": "Missed call from $name$",
"callMissedTitle": "Call missed",
"startVideoCall": "Start Video Call",
"noCameraFound": "No camera found",
"noAudioInputFound": "No audio input found",
"noAudioOutputFound": "No audio output found",
"callMediaPermissionsTitle": "Voice and video calls",
"callMissedCausePermission": "Call missed from '$name$' because you need to enable the 'Voice and video calls' permission in the Privacy Settings.",
"callMediaPermissionsDescription": "Allows access to accept voice and video calls from other users",
"callMediaPermissionsDialogContent": "The current implementation of voice/video calls will expose your IP address to the Oxen Foundation servers and the calling/called user."
"callMediaPermissionsDialogContent": "The current implementation of voice/video calls will expose your IP address to the Oxen Foundation servers and the calling/called user.",
"menuCall": "Call"
}
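
The new call strings reuse the $name$ placeholder convention already used in this locale file. A minimal consumption sketch, matching the window.i18n(key, substitution) call shape visible in the IncomingCallDialog change further down; the 'Alice' value is a placeholder:

// window.i18n substitutes $name$ with the second argument (see IncomingCallDialog below).
const callerName = 'Alice'; // placeholder
const dialogTitle = window.i18n('incomingCallFrom', callerName); // Incoming call from 'Alice'
const missedToast = window.i18n('callMissed', callerName); // Missed call from Alice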

View File

@ -4,5 +4,5 @@
"url": "http://public.loki.foundation:38157/"
}
],
"openDevTools": true
"openDevTools": false
}

BIN fixtures/ringing.mp3 (new binary file, content not shown)

File diff suppressed because it is too large.

View File

@ -36,6 +36,17 @@
textsecure.storage.put('is_sign_in_by_linking', isLinking);
},
isSignWithRecoveryPhrase() {
const isRecoveryPhraseUsed = textsecure.storage.get('is_sign_in_recovery_phrase');
if (isRecoveryPhraseUsed === undefined) {
return false;
}
return isRecoveryPhraseUsed;
},
setSignWithRecoveryPhrase(isRecoveryPhraseUsed) {
textsecure.storage.put('is_sign_in_recovery_phrase', isRecoveryPhraseUsed);
},
getLastProfileUpdateTimestamp() {
return textsecure.storage.get('last_profile_update_timestamp');
},
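
The storage layer gains a persisted flag recording whether this account was restored from a recovery phrase. A hedged sketch of the intended flow; the setter call site is an assumption (it is not in the hunks shown here), while the getter mirrors the LeftPaneBanner change later in this diff, which reads it through UserUtils.isSignWithRecoveryPhrase():

// Assumed write side: flag the account right after a restore-from-seed sign-in.
setSignWithRecoveryPhrase(true);
// Read side: undefined is normalised to false, so fresh accounts report false.
const restoredFromSeed = isSignWithRecoveryPhrase();
if (restoredFromSeed) {
  // no need to nag about backing up the recovery phrase the user just typed in
}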

View File

@ -43,6 +43,7 @@
"format-full": "prettier --list-different --write \"*.{css,js,json,scss,ts,tsx}\" \"./**/*.{css,js,json,scss,ts,tsx}\"",
"transpile": "tsc --incremental",
"transpile:watch": "tsc -w",
"integration-test": "mocha --recursive --exit --timeout 30000 \"./ts/test-integration/**/*.test.js\" \"./ts/test/*.test.js\"",
"clean-transpile": "rimraf 'ts/**/*.js ts/*.js' 'ts/*.js.map' 'ts/**/*.js.map' && rimraf tsconfig.tsbuildinfo;",
"ready": "yarn clean-transpile; yarn grunt && yarn lint-full && yarn test",
"build:webpack:sql-worker": "cross-env NODE_ENV=production webpack -c webpack-sql-worker.config.ts",
@ -133,6 +134,7 @@
"uuid": "3.3.2"
},
"devDependencies": {
"@playwright/test": "^1.16.3",
"@types/backbone": "^1.4.2",
"@types/better-sqlite3": "5.4.1",
"@types/blueimp-load-image": "^2.23.8",
@ -202,6 +204,7 @@
"mocha-testcheck": "1.0.0-rc.0",
"node-gyp": "3.8.0",
"node-sass-import-once": "1.2.0",
"playwright": "^1.16.3",
"postinstall-prepare": "^1.0.1",
"prettier": "1.19.0",
"qs": "6.5.1",

View File

@ -39,7 +39,7 @@ window.isBehindProxy = () => Boolean(config.proxyUrl);
window.lokiFeatureFlags = {
useOnionRequests: true,
useCallMessage: false,
useCallMessage: true,
};
window.isBeforeVersion = (toCheck, baseVersion) => {

View File

@ -157,6 +157,7 @@ message DataMessage {
message CallMessage {
enum Type {
PRE_OFFER = 6;
OFFER = 1;
ANSWER = 2;
PROVISIONAL_ANSWER = 3;
@ -170,6 +171,9 @@ message CallMessage {
repeated uint32 sdpMLineIndexes = 3;
repeated string sdpMids = 4;
// @required
required string uuid = 5;
}
message ConfigurationMessage {
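
The CallMessage schema gains a PRE_OFFER type and a required uuid field identifying each call. A hedged sketch of building such a message from the generated bindings, assuming protobufjs-style create/encode accessors on SignalService; the import paths are illustrative:

// Hedged sketch; SignalService.CallMessage accessors assume protobufjs static codegen.
import { v4 as uuidv4 } from 'uuid';
import { SignalService } from '../protobuf';

const preOffer = SignalService.CallMessage.create({
  type: SignalService.CallMessage.Type.PRE_OFFER,
  uuid: uuidv4(), // uuid is now required on every CallMessage
});
const plaintext = SignalService.CallMessage.encode(preOffer).finish(); // Uint8Array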

View File

@ -170,6 +170,7 @@
line-height: 16px;
letter-spacing: 0.3px;
margin-top: 3px;
white-space: nowrap;
}
.module-message__link-preview {

View File

@ -734,6 +734,8 @@ label {
display: flex;
justify-content: center;
background: white;
padding: 0.5rem;
svg {
width: 135px;
height: 135px;

View File

@ -115,15 +115,6 @@
border-left: var(--border-session);
border-top: var(--border-session);
&__blocking-overlay {
background-color: rgba(0, 0, 0, 0.8);
position: absolute;
top: 0px;
bottom: 0px;
left: 0px;
right: 0px;
}
}
.conversation-info-panel {
@ -164,6 +155,7 @@
min-height: min-content;
background: var(--color-cell-background);
border-top: var(--border-session);
z-index: 1;
.session-icon-button {
// & > .session-icon-button {
@ -196,6 +188,8 @@
flex-grow: 1;
min-height: $composition-container-height;
padding: $session-margin-xs 0;
z-index: 1;
background-color: inherit;
ul {
max-height: 70vh;

View File

@ -35,6 +35,15 @@ const SpacerStyled = styled.div<SpacerProps>`
: props.size === 'sm'
? 'var(--margins-sm)'
: 'var(--margins-xs)'};
width: ${props =>
props.size === 'lg'
? 'var(--margins-lg)'
: props.size === 'md'
? 'var(--margins-md)'
: props.size === 'sm'
? 'var(--margins-sm)'
: 'var(--margins-xs)'};
`;
const Spacer = (props: SpacerProps) => {

View File

@ -14,6 +14,10 @@ import {
getConversationHeaderProps,
getConversationHeaderTitleProps,
getCurrentNotificationSettingText,
getHasIncomingCall,
getHasOngoingCall,
getIsSelectedNoteToSelf,
getIsSelectedPrivate,
getSelectedConversation,
getSelectedConversationIsPublic,
getSelectedConversationKey,
@ -35,6 +39,7 @@ import {
openRightPanel,
resetSelectedMessageIds,
} from '../../state/ducks/conversations';
import { callRecipient } from '../../interactions/conversationInteractions';
export interface TimerOption {
name: string;
@ -128,7 +133,7 @@ const SelectionOverlay = () => {
const TripleDotsMenu = (props: { triggerId: string; showBackButton: boolean }) => {
const { showBackButton } = props;
if (showBackButton) {
return <></>;
return null;
}
return (
<div
@ -202,6 +207,32 @@ const BackButton = (props: { onGoBack: () => void; showBackButton: boolean }) =>
);
};
const CallButton = () => {
const isPrivate = useSelector(getIsSelectedPrivate);
const isMe = useSelector(getIsSelectedNoteToSelf);
const selectedConvoKey = useSelector(getSelectedConversationKey);
const hasIncomingCall = useSelector(getHasIncomingCall);
const hasOngoingCall = useSelector(getHasOngoingCall);
const canCall = !(hasIncomingCall || hasOngoingCall);
if (!isPrivate || isMe || !selectedConvoKey) {
return null;
}
return (
<SessionIconButton
iconType="phone"
iconSize="large"
iconPadding="2px"
margin="0 10px 0 0"
onClick={() => {
void callRecipient(selectedConvoKey, canCall);
}}
/>
);
};
export const StyledSubtitleContainer = styled.div`
display: flex;
flex-direction: row;
@ -362,17 +393,20 @@ export const ConversationHeaderWithDetails = () => {
{!isKickedFromGroup && <ExpirationLength expirationSettingName={expirationSettingName} />}
{!isSelectionMode && (
<AvatarHeader
onAvatarClick={() => {
dispatch(openRightPanel());
}}
pubkey={conversationKey}
showBackButton={isMessageDetailOpened}
avatarPath={avatarPath}
memberAvatars={memberDetails}
name={name}
profileName={profileName}
/>
<>
<CallButton />
<AvatarHeader
onAvatarClick={() => {
dispatch(openRightPanel());
}}
pubkey={conversationKey}
showBackButton={isMessageDetailOpened}
avatarPath={avatarPath}
memberAvatars={memberDetails}
name={name}
profileName={profileName}
/>
</>
)}
<MemoConversationHeaderMenu

View File

@ -3,7 +3,7 @@ import { PropsForDataExtractionNotification } from '../../models/messageType';
import { SignalService } from '../../protobuf';
import { Flex } from '../basic/Flex';
import { SessionIcon } from '../session/icon';
import { SpacerXS, Text } from '../basic/Text';
import { SpacerSM, Text } from '../basic/Text';
import { ReadableMessage } from './ReadableMessage';
export const DataExtractionNotification = (props: PropsForDataExtractionNotification) => {
@ -25,13 +25,14 @@ export const DataExtractionNotification = (props: PropsForDataExtractionNotifica
>
<Flex
container={true}
flexDirection="column"
flexDirection="row"
alignItems="center"
justifyContent="center"
margin={'var(--margins-sm)'}
id={`msg-${messageId}`}
>
<SessionIcon iconType="upload" iconSize={'small'} iconRotation={180} />
<SpacerXS />
<SpacerSM />
<Text text={contentText} subtle={true} />
</Flex>
</ReadableMessage>

View File

@ -157,7 +157,7 @@ export const QuoteGenericFile = (
const { attachment, isIncoming } = props;
if (!attachment) {
return <></>;
return null;
}
const { fileName, contentType } = attachment;
@ -167,7 +167,7 @@ export const QuoteGenericFile = (
!MIME.isAudio(contentType);
if (!isGenericFile) {
return <></>;
return null;
}
return (

View File

@ -22,7 +22,7 @@ export const StagedLinkPreview = (props: Props) => {
const isImage = image && isImageAttachment(image);
if (isLoaded && !(title && domain)) {
return <></>;
return null;
}
const isLoading = !isLoaded;

View File

@ -26,7 +26,7 @@ const TypingBubbleContainer = styled.div<TypingBubbleProps>`
export const TypingBubble = (props: TypingBubbleProps) => {
if (props.conversationType === ConversationTypeEnum.GROUP) {
return <></>;
return null;
}
if (!props.isTyping) {

View File

@ -2,6 +2,7 @@ import classNames from 'classnames';
import React, { useCallback } from 'react';
import { useDispatch, useSelector } from 'react-redux';
import _ from 'underscore';
import { replyToMessage } from '../../../interactions/conversationInteractions';
import { MessageRenderingProps, QuoteClickOptions } from '../../../models/messageType';
import { toggleSelectedMessageId } from '../../../state/ducks/conversations';
import {
@ -44,6 +45,10 @@ export const MessageContentWithStatuses = (props: Props) => {
[window.contextMenuShown, props?.messageId, multiSelectMode, props?.isDetailView]
);
const onDoubleClickReplyToMessage = () => {
void replyToMessage(messageId);
};
const { messageId, onQuoteClick, ctxMenuID, isDetailView } = props;
if (!contentProps) {
return null;
@ -56,6 +61,7 @@ export const MessageContentWithStatuses = (props: Props) => {
className={classNames('module-message', `module-message--${direction}`)}
role="button"
onClick={onClickOnMessageOuterContainer}
onDoubleClick={onDoubleClickReplyToMessage}
style={{ width: hasAttachments && isTrustedForAttachmentDownload ? 'min-content' : 'auto' }}
>
<MessageStatus messageId={messageId} isCorrectSide={isIncoming} />

View File

@ -193,7 +193,7 @@ export class UpdateGroupMembersDialog extends React.Component<Props, State> {
const { zombies } = this.state;
if (!zombies.length) {
return <></>;
return null;
}
const zombieElements = zombies.map((member: ContactType, index: number) => {

View File

@ -46,8 +46,9 @@ import { loadDefaultRooms } from '../../opengroup/opengroupV2/ApiUtil';
import { ActionPanelOnionStatusLight } from '../dialog/OnionStatusPathDialog';
import { switchHtmlToDarkTheme, switchHtmlToLightTheme } from '../../state/ducks/SessionTheme';
import { DraggableCallContainer } from './calling/CallContainer';
import { DraggableCallContainer } from './calling/DraggableCallContainer';
import { IncomingCallDialog } from './calling/IncomingCallDialog';
import { CallInFullScreenContainer } from './calling/CallInFullScreenContainer';
const Section = (props: { type: SectionType; avatarPath?: string | null }) => {
const ourNumber = useSelector(getOurNumber);
@ -223,13 +224,21 @@ const doAppStartUp = () => {
// trigger a sync message if needed for our other devices
void triggerSyncIfNeeded();
void getSwarmPollingInstance().start();
void loadDefaultRooms();
debounce(triggerAvatarReUploadIfNeeded, 200);
};
// TODO: Investigate the case where we reconnect
void getSwarmPollingInstance().start();
const CallContainer = () => {
return (
<>
<DraggableCallContainer />
<IncomingCallDialog />
<CallInFullScreenContainer />
</>
);
};
/**
@ -263,7 +272,7 @@ export const ActionsPanel = () => {
if (!ourPrimaryConversation) {
window?.log?.warn('ActionsPanel: ourPrimaryConversation is not set');
return <></>;
return null;
}
useInterval(() => {
@ -290,9 +299,7 @@ export const ActionsPanel = () => {
<>
<ModalContainer />
<DraggableCallContainer />
<IncomingCallDialog />
<CallContainer />
<div className="module-left-pane__sections-container">
<Section type={SectionType.Profile} avatarPath={ourPrimaryConversation.avatarPath} />
<Section type={SectionType.Message} />

View File

@ -10,6 +10,7 @@ import { recoveryPhraseModal } from '../../state/ducks/modalDialog';
import { Flex } from '../basic/Flex';
import { getFocusedSection } from '../../state/selectors/section';
import { SectionType } from '../../state/ducks/section';
import { UserUtils } from '../../session/utils';
const Tab = ({
isSelected,
@ -86,8 +87,9 @@ const BannerInner = () => {
export const LeftPaneBanner = () => {
const section = useSelector(getFocusedSection);
const isSignInWithRecoveryPhrase = UserUtils.isSignWithRecoveryPhrase();
if (section !== SectionType.Message) {
if (section !== SectionType.Message || isSignInWithRecoveryPhrase) {
return null;
}

View File

@ -50,7 +50,7 @@ export class SessionInboxView extends React.Component<any, State> {
public render() {
if (!this.state.isInitialLoadComplete) {
return <></>;
return null;
}
const persistor = persistStore(this.store);

View File

@ -162,7 +162,7 @@ export const SessionJoinableRooms = (props: { onRoomClicked: () => void }) => {
if (!joinableRooms.inProgress && !joinableRooms.rooms?.length) {
window?.log?.info('no default joinable rooms yet and not in progress');
return <></>;
return null;
}
const componentToRender = joinableRooms.inProgress ? (

View File

@ -13,7 +13,7 @@ const SessionToastContainerPrivate = () => {
rtl={false}
pauseOnFocusLoss={false}
draggable={false}
pauseOnHover={false}
pauseOnHover={true}
transition={Slide}
limit={5}
/>

View File

@ -0,0 +1,140 @@
import React, { useEffect, useRef, useState } from 'react';
import styled from 'styled-components';
type SplitViewProps = {
top: React.ReactElement;
bottom: React.ReactElement;
disableTop: boolean;
};
const SlyledSplitView = styled.div`
height: 100%;
display: flex;
flex-direction: column;
`;
const Divider = styled.div`
width: 100%;
cursor: row-resize;
height: 5px;
background-color: var(--color-cell-background);
margin-top: 2px;
`;
const DividerHandle = styled.div`
width: 10%;
height: 5px;
cursor: row-resize;
background-color: var(--color-text);
flex-shrink: 0;
position: relative;
left: 50%;
transform: translateX(-50%);
`;
const StyledTop = styled.div`
display: flex;
flex-direction: column;
flex-grow: 1;
`;
const TopSplitViewPanel = ({
children,
topHeight,
setTopHeight,
}: {
children: React.ReactNode;
topHeight: number | undefined;
setTopHeight: (value: number) => void;
}) => {
const topRef = useRef<HTMLDivElement>(null);
React.useEffect(() => {
if (topRef.current) {
if (!topHeight) {
setTopHeight(Math.max(MIN_HEIGHT_TOP, topRef.current?.clientHeight / 2));
return;
}
topRef.current.style.height = `${topHeight}px`;
topRef.current.style.minHeight = `${topHeight}px`;
}
}, [topRef, topHeight, setTopHeight]);
return <StyledTop ref={topRef}>{children}</StyledTop>;
};
const MIN_HEIGHT_TOP = 200;
const MIN_HEIGHT_BOTTOM = 0;
export const SplitViewContainer: React.FunctionComponent<SplitViewProps> = ({
disableTop,
top,
bottom,
}) => {
const [topHeight, setTopHeight] = useState<undefined | number>(undefined);
const [separatorYPosition, setSeparatorYPosition] = useState<undefined | number>(undefined);
const [dragging, setDragging] = useState(false);
const splitPaneRef = useRef<HTMLDivElement | null>(null);
const dividerRef = useRef<HTMLDivElement | null>(null);
function onMouseDown(e: any) {
setSeparatorYPosition(e.clientY);
setDragging(true);
}
function onWindowResize() {
if ((dividerRef?.current?.offsetTop || 0) + 200 > window.innerHeight) {
const clientY = Math.max(MIN_HEIGHT_TOP + 200, window.innerHeight / 2);
onMouseMove({ clientY }, true);
}
}
function onMouseUp() {
setDragging(false);
}
function onMouseMove(e: { clientY: number }, overrideIsDragging = false) {
if ((dragging || overrideIsDragging) && topHeight && separatorYPosition) {
const newTopHeight = topHeight + e.clientY - separatorYPosition;
setSeparatorYPosition(e.clientY);
if (newTopHeight < MIN_HEIGHT_TOP) {
setTopHeight(MIN_HEIGHT_TOP);
return;
}
if (splitPaneRef.current) {
const splitPaneHeight = splitPaneRef.current.clientHeight;
if (newTopHeight > splitPaneHeight - MIN_HEIGHT_BOTTOM) {
setTopHeight(splitPaneHeight - MIN_HEIGHT_BOTTOM);
return;
}
}
setTopHeight(newTopHeight);
}
}
useEffect(() => {
document.addEventListener('mousemove', onMouseMove);
document.addEventListener('mouseup', onMouseUp);
window.addEventListener('resize', onWindowResize);
return () => {
document.removeEventListener('mousemove', onMouseMove);
document.removeEventListener('mouseup', onMouseUp);
window.removeEventListener('resize', onWindowResize);
};
});
return (
<SlyledSplitView ref={splitPaneRef}>
{!disableTop && (
<TopSplitViewPanel topHeight={topHeight} setTopHeight={setTopHeight}>
{top}
<Divider ref={dividerRef} onMouseDown={onMouseDown}>
<DividerHandle />
</Divider>
</TopSplitViewPanel>
)}
{bottom}
</SlyledSplitView>
);
};
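
A hedged usage sketch of the new SplitViewContainer. It mirrors how SessionConversation wires it further down in this diff, with the in-conversation call UI on top, the messages list at the bottom, and the top pane disabled while no call is ongoing; the placeholder children stand in for those components:

// Hedged usage sketch; the real children are InConversationCallContainer and
// SessionMessagesListContainer, as in the SessionConversation change below.
import React from 'react';
import { SplitViewContainer } from './SplitViewContainer';

export const ConversationBody = (props: { hasOngoingCall: boolean }) => (
  <SplitViewContainer
    top={<div>call UI</div>}
    bottom={<div>messages list</div>}
    disableTop={!props.hasOngoingCall}
  />
);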

View File

@ -0,0 +1,385 @@
import { SessionIconButton } from '../icon';
import { animation, contextMenu, Item, Menu } from 'react-contexify';
import { InputItem } from '../../../session/utils/CallManager';
import { setFullScreenCall } from '../../../state/ducks/conversations';
import { CallManager, ToastUtils } from '../../../session/utils';
import React from 'react';
import { useDispatch, useSelector } from 'react-redux';
import { getHasOngoingCallWithPubkey } from '../../../state/selectors/conversations';
import { DropDownAndToggleButton } from '../icon/DropDownAndToggleButton';
import styled from 'styled-components';
const videoTriggerId = 'video-menu-trigger-id';
const audioTriggerId = 'audio-menu-trigger-id';
const audioOutputTriggerId = 'audio-output-menu-trigger-id';
export const VideoInputButton = ({
currentConnectedCameras,
localStreamVideoIsMuted,
hideArrowIcon = false,
}: {
currentConnectedCameras: Array<InputItem>;
localStreamVideoIsMuted: boolean;
hideArrowIcon?: boolean;
}) => {
return (
<>
<DropDownAndToggleButton
iconType="camera"
isMuted={localStreamVideoIsMuted}
onMainButtonClick={() => {
void handleCameraToggle(currentConnectedCameras, localStreamVideoIsMuted);
}}
onArrowClick={e => {
showVideoInputMenu(currentConnectedCameras, e);
}}
hidePopoverArrow={hideArrowIcon}
/>
<VideoInputMenu triggerId={videoTriggerId} camerasList={currentConnectedCameras} />
</>
);
};
export const AudioInputButton = ({
currentConnectedAudioInputs,
isAudioMuted,
hideArrowIcon = false,
}: {
currentConnectedAudioInputs: Array<InputItem>;
isAudioMuted: boolean;
hideArrowIcon?: boolean;
}) => {
return (
<>
<DropDownAndToggleButton
iconType="microphone"
isMuted={isAudioMuted}
onMainButtonClick={() => {
void handleMicrophoneToggle(currentConnectedAudioInputs, isAudioMuted);
}}
onArrowClick={e => {
showAudioInputMenu(currentConnectedAudioInputs, e);
}}
hidePopoverArrow={hideArrowIcon}
/>
<AudioInputMenu triggerId={audioTriggerId} audioInputsList={currentConnectedAudioInputs} />
</>
);
};
export const AudioOutputButton = ({
currentConnectedAudioOutputs,
isAudioOutputMuted,
hideArrowIcon = false,
}: {
currentConnectedAudioOutputs: Array<InputItem>;
isAudioOutputMuted: boolean;
hideArrowIcon?: boolean;
}) => {
return (
<>
<DropDownAndToggleButton
iconType="volume"
isMuted={isAudioOutputMuted}
onMainButtonClick={() => {
void handleSpeakerToggle(currentConnectedAudioOutputs, isAudioOutputMuted);
}}
onArrowClick={e => {
showAudioOutputMenu(currentConnectedAudioOutputs, e);
}}
hidePopoverArrow={hideArrowIcon}
/>
<AudioOutputMenu
triggerId={audioOutputTriggerId}
audioOutputsList={currentConnectedAudioOutputs}
/>
</>
);
};
const VideoInputMenu = ({
triggerId,
camerasList,
}: {
triggerId: string;
camerasList: Array<InputItem>;
}) => {
return (
<Menu id={triggerId} animation={animation.fade}>
{camerasList.map(m => {
return (
<Item
key={m.deviceId}
onClick={() => {
void CallManager.selectCameraByDeviceId(m.deviceId);
}}
>
{m.label.substr(0, 40)}
</Item>
);
})}
</Menu>
);
};
const AudioInputMenu = ({
triggerId,
audioInputsList,
}: {
triggerId: string;
audioInputsList: Array<InputItem>;
}) => {
return (
<Menu id={triggerId} animation={animation.fade}>
{audioInputsList.map(m => {
return (
<Item
key={m.deviceId}
onClick={() => {
void CallManager.selectAudioInputByDeviceId(m.deviceId);
}}
>
{m.label.substr(0, 40)}
</Item>
);
})}
</Menu>
);
};
const AudioOutputMenu = ({
triggerId,
audioOutputsList,
}: {
triggerId: string;
audioOutputsList: Array<InputItem>;
}) => {
return (
<Menu id={triggerId} animation={animation.fade}>
{audioOutputsList.map(m => {
return (
<Item
key={m.deviceId}
onClick={() => {
void CallManager.selectAudioOutputByDeviceId(m.deviceId);
}}
>
{m.label.substr(0, 40)}
</Item>
);
})}
</Menu>
);
};
const ShowInFullScreenButton = ({ isFullScreen }: { isFullScreen: boolean }) => {
const dispatch = useDispatch();
const showInFullScreen = () => {
if (isFullScreen) {
dispatch(setFullScreenCall(false));
} else {
dispatch(setFullScreenCall(true));
}
};
return (
<SessionIconButton
iconSize={60}
iconPadding="20px"
iconType="fullscreen"
backgroundColor="white"
borderRadius="50%"
onClick={showInFullScreen}
iconColor="black"
margin="10px"
/>
);
};
export const HangUpButton = () => {
const ongoingCallPubkey = useSelector(getHasOngoingCallWithPubkey);
const handleEndCall = async () => {
// call method to end call connection
if (ongoingCallPubkey) {
await CallManager.USER_hangup(ongoingCallPubkey);
}
};
return (
<SessionIconButton
iconSize={60}
iconPadding="20px"
iconType="hangup"
backgroundColor="white"
borderRadius="50%"
onClick={handleEndCall}
iconColor="red"
margin="10px"
/>
);
};
const showAudioInputMenu = (
currentConnectedAudioInputs: Array<any>,
e: React.MouseEvent<HTMLDivElement>
) => {
if (currentConnectedAudioInputs.length === 0) {
ToastUtils.pushNoAudioInputFound();
return;
}
contextMenu.show({
id: audioTriggerId,
event: e,
});
};
const showAudioOutputMenu = (
currentConnectedAudioOutputs: Array<any>,
e: React.MouseEvent<HTMLDivElement>
) => {
if (currentConnectedAudioOutputs.length === 0) {
ToastUtils.pushNoAudioOutputFound();
return;
}
contextMenu.show({
id: audioOutputTriggerId,
event: e,
});
};
const showVideoInputMenu = (
currentConnectedCameras: Array<InputItem>,
e: React.MouseEvent<HTMLDivElement>
) => {
if (currentConnectedCameras.length === 0) {
ToastUtils.pushNoCameraFound();
return;
}
contextMenu.show({
id: videoTriggerId,
event: e,
});
};
const handleCameraToggle = async (
currentConnectedCameras: Array<InputItem>,
localStreamVideoIsMuted: boolean
) => {
if (!currentConnectedCameras.length) {
ToastUtils.pushNoCameraFound();
return;
}
if (localStreamVideoIsMuted) {
// select the first one
await CallManager.selectCameraByDeviceId(currentConnectedCameras[0].deviceId);
} else {
await CallManager.selectCameraByDeviceId(CallManager.DEVICE_DISABLED_DEVICE_ID);
}
};
const handleMicrophoneToggle = async (
currentConnectedAudioInputs: Array<InputItem>,
isAudioMuted: boolean
) => {
if (!currentConnectedAudioInputs.length) {
ToastUtils.pushNoAudioInputFound();
return;
}
if (isAudioMuted) {
// selects the first one
await CallManager.selectAudioInputByDeviceId(currentConnectedAudioInputs[0].deviceId);
} else {
await CallManager.selectAudioInputByDeviceId(CallManager.DEVICE_DISABLED_DEVICE_ID);
}
};
const handleSpeakerToggle = async (
currentConnectedAudioOutputs: Array<InputItem>,
isAudioOutputMuted: boolean
) => {
if (!currentConnectedAudioOutputs.length) {
ToastUtils.pushNoAudioInputFound();
return;
}
if (isAudioOutputMuted) {
// selects the first one
await CallManager.selectAudioOutputByDeviceId(currentConnectedAudioOutputs[0].deviceId);
} else {
await CallManager.selectAudioOutputByDeviceId(CallManager.DEVICE_DISABLED_DEVICE_ID);
}
};
const StyledCallWindowControls = styled.div`
position: absolute;
bottom: 0px;
width: 100%;
height: 100%;
align-items: flex-end;
padding: 10px;
border-radius: 10px;
margin-left: auto;
margin-right: auto;
left: 0;
right: 0;
transition: all 0.25s ease-in-out;
display: flex;
justify-content: center;
opacity: 0;
&:hover {
opacity: 1;
}
`;
export const CallWindowControls = ({
currentConnectedCameras,
currentConnectedAudioInputs,
currentConnectedAudioOutputs,
isAudioMuted,
isAudioOutputMuted,
remoteStreamVideoIsMuted,
localStreamVideoIsMuted,
isFullScreen,
}: {
isAudioMuted: boolean;
isAudioOutputMuted: boolean;
localStreamVideoIsMuted: boolean;
remoteStreamVideoIsMuted: boolean;
currentConnectedAudioInputs: Array<InputItem>;
currentConnectedAudioOutputs: Array<InputItem>;
currentConnectedCameras: Array<InputItem>;
isFullScreen: boolean;
}) => {
return (
<StyledCallWindowControls>
{!remoteStreamVideoIsMuted && <ShowInFullScreenButton isFullScreen={isFullScreen} />}
<VideoInputButton
currentConnectedCameras={currentConnectedCameras}
localStreamVideoIsMuted={localStreamVideoIsMuted}
hideArrowIcon={isFullScreen}
/>
<AudioInputButton
currentConnectedAudioInputs={currentConnectedAudioInputs}
isAudioMuted={isAudioMuted}
hideArrowIcon={isFullScreen}
/>
<AudioOutputButton
currentConnectedAudioOutputs={currentConnectedAudioOutputs}
isAudioOutputMuted={isAudioOutputMuted}
hideArrowIcon={isFullScreen}
/>
<HangUpButton />
</StyledCallWindowControls>
);
};
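
A hedged sketch of wiring CallWindowControls from the useVideoCallEventsListener hook, the same pattern the fullscreen and in-conversation call containers use later in this diff; the hook's import path and arguments are copied from those components:

// Hedged wiring sketch; mirrors CallInFullScreenContainer / InConversationCallContainer below.
import React from 'react';
import { useVideoCallEventsListener } from '../../../hooks/useVideoEventListener';
import { CallWindowControls } from './CallButtons';

export const MinimalCallControls = () => {
  const {
    currentConnectedCameras,
    currentConnectedAudioInputs,
    currentConnectedAudioOutputs,
    isAudioMuted,
    isAudioOutputMuted,
    localStreamVideoIsMuted,
    remoteStreamVideoIsMuted,
  } = useVideoCallEventsListener('MinimalCallControls', true);

  return (
    <CallWindowControls
      currentConnectedCameras={currentConnectedCameras}
      currentConnectedAudioInputs={currentConnectedAudioInputs}
      currentConnectedAudioOutputs={currentConnectedAudioOutputs}
      isAudioMuted={isAudioMuted}
      isAudioOutputMuted={isAudioOutputMuted}
      localStreamVideoIsMuted={localStreamVideoIsMuted}
      remoteStreamVideoIsMuted={remoteStreamVideoIsMuted}
      isFullScreen={false}
    />
  );
};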

View File

@ -0,0 +1,91 @@
import React, { useEffect } from 'react';
import { useDispatch, useSelector } from 'react-redux';
// tslint:disable-next-line: no-submodule-imports
import useKey from 'react-use/lib/useKey';
import styled from 'styled-components';
import { useVideoCallEventsListener } from '../../../hooks/useVideoEventListener';
import { setFullScreenCall } from '../../../state/ducks/conversations';
import {
getCallIsInFullScreen,
getHasOngoingCallWithFocusedConvo,
} from '../../../state/selectors/conversations';
import { CallWindowControls } from './CallButtons';
import { StyledVideoElement } from './DraggableCallContainer';
const CallInFullScreenVisible = styled.div`
position: absolute;
z-index: 9;
top: 0;
bottom: 0;
right: 0;
left: 0;
display: flex;
flex-direction: column;
background-color: black;
border: var(--session-border);
opacity: 1;
`;
export const CallInFullScreenContainer = () => {
const dispatch = useDispatch();
const ongoingCallWithFocused = useSelector(getHasOngoingCallWithFocusedConvo);
const hasOngoingCallFullScreen = useSelector(getCallIsInFullScreen);
const {
remoteStream,
remoteStreamVideoIsMuted,
currentConnectedAudioInputs,
currentConnectedAudioOutputs,
currentConnectedCameras,
isAudioMuted,
isAudioOutputMuted,
localStreamVideoIsMuted,
} = useVideoCallEventsListener('CallInFullScreenContainer', true);
const videoRefRemote = React.useRef<HTMLVideoElement>(null);
function toggleFullScreenOFF() {
dispatch(setFullScreenCall(false));
}
useKey('Escape', () => {
toggleFullScreenOFF();
});
useEffect(() => {
// close fullscreen mode if the remote video gets muted
if (remoteStreamVideoIsMuted) {
dispatch(setFullScreenCall(false));
}
}, [remoteStreamVideoIsMuted]);
if (!ongoingCallWithFocused || !hasOngoingCallFullScreen) {
return null;
}
if (videoRefRemote?.current) {
if (videoRefRemote.current.srcObject !== remoteStream) {
videoRefRemote.current.srcObject = remoteStream;
}
}
return (
<CallInFullScreenVisible onClick={toggleFullScreenOFF}>
<StyledVideoElement
ref={videoRefRemote}
autoPlay={true}
isVideoMuted={remoteStreamVideoIsMuted}
/>
<CallWindowControls
currentConnectedAudioInputs={currentConnectedAudioInputs}
currentConnectedAudioOutputs={currentConnectedAudioOutputs}
currentConnectedCameras={currentConnectedCameras}
isAudioMuted={isAudioMuted}
isAudioOutputMuted={isAudioOutputMuted}
localStreamVideoIsMuted={localStreamVideoIsMuted}
remoteStreamVideoIsMuted={remoteStreamVideoIsMuted}
isFullScreen={true}
/>
</CallInFullScreenVisible>
);
};

View File

@ -1,12 +1,9 @@
import React, { useCallback, useEffect, useRef, useState } from 'react';
import React, { useEffect, useRef, useState } from 'react';
import { useSelector } from 'react-redux';
import Draggable, { DraggableData, DraggableEvent } from 'react-draggable';
// tslint:disable-next-line: no-submodule-imports
import useMountedState from 'react-use/lib/useMountedState';
import styled from 'styled-components';
import _ from 'underscore';
import { CallManager } from '../../../session/utils';
import {
getHasOngoingCall,
getHasOngoingCallWith,
@ -14,12 +11,14 @@ import {
} from '../../../state/selectors/conversations';
import { openConversationWithMessages } from '../../../state/ducks/conversations';
import { Avatar, AvatarSize } from '../../Avatar';
import { getConversationController } from '../../../session/conversations';
import { useVideoCallEventsListener } from '../../../hooks/useVideoEventListener';
import { useAvatarPath, useConversationUsername } from '../../../hooks/useParamSelector';
import { VideoLoadingSpinner } from './InConversationCallContainer';
export const DraggableCallWindow = styled.div`
position: absolute;
z-index: 9;
box-shadow: var(--color-session-shadow);
box-shadow: 0px 0px 10px 0px #000000;
max-height: 300px;
width: 12vw;
display: flex;
@ -28,11 +27,11 @@ export const DraggableCallWindow = styled.div`
border: var(--session-border);
`;
export const StyledVideoElement = styled.video<{ isRemoteVideoMuted: boolean }>`
export const StyledVideoElement = styled.video<{ isVideoMuted: boolean }>`
padding: 0 1rem;
height: 100%;
width: 100%;
opacity: ${props => (props.isRemoteVideoMuted ? 0 : 1)};
opacity: ${props => (props.isVideoMuted ? 0 : 1)};
`;
const StyledDraggableVideoElement = styled(StyledVideoElement)`
@ -69,15 +68,21 @@ export const DraggableCallContainer = () => {
const selectedConversationKey = useSelector(getSelectedConversationKey);
const hasOngoingCall = useSelector(getHasOngoingCall);
const [positionX, setPositionX] = useState(window.innerWidth / 2);
const [positionY, setPositionY] = useState(window.innerHeight / 2);
// the draggable container has a width of 12vw, so we just set its X to a bit more than this
const [positionX, setPositionX] = useState(window.innerWidth - (window.innerWidth * 1) / 6);
// 90 px is a bit below the conversation header height
const [positionY, setPositionY] = useState(90);
const [lastPositionX, setLastPositionX] = useState(0);
const [lastPositionY, setLastPositionY] = useState(0);
const [isRemoteVideoMuted, setIsRemoteVideoMuted] = useState(true);
const ongoingCallPubkey = ongoingCallProps?.id;
const videoRefRemote = useRef<any>(undefined);
const mountedState = useMountedState();
const { remoteStreamVideoIsMuted, remoteStream } = useVideoCallEventsListener(
'DraggableCallContainer',
false
);
const ongoingCallUsername = useConversationUsername(ongoingCallPubkey);
const avatarPath = useAvatarPath(ongoingCallPubkey);
const videoRefRemote = useRef<HTMLVideoElement>(null);
function onWindowResize() {
if (positionY + 50 > window.innerHeight || positionX + 50 > window.innerWidth) {
@ -94,45 +99,21 @@ export const DraggableCallContainer = () => {
};
}, [positionX, positionY]);
useEffect(() => {
if (ongoingCallPubkey !== selectedConversationKey) {
CallManager.setVideoEventsListener(
(
_localStream: MediaStream | null,
remoteStream: MediaStream | null,
_camerasList: any,
_audioList: any,
remoteVideoIsMuted: boolean
) => {
if (mountedState() && videoRefRemote?.current) {
videoRefRemote.current.srcObject = remoteStream;
setIsRemoteVideoMuted(remoteVideoIsMuted);
}
}
);
if (videoRefRemote?.current && remoteStream) {
if (videoRefRemote.current.srcObject !== remoteStream) {
videoRefRemote.current.srcObject = remoteStream;
}
}
return () => {
CallManager.setVideoEventsListener(null);
};
}, [ongoingCallPubkey, selectedConversationKey]);
const openCallingConversation = useCallback(() => {
const openCallingConversation = () => {
if (ongoingCallPubkey && ongoingCallPubkey !== selectedConversationKey) {
void openConversationWithMessages({ conversationKey: ongoingCallPubkey });
}
}, [ongoingCallPubkey, selectedConversationKey]);
};
if (!hasOngoingCall || !ongoingCallProps || ongoingCallPubkey === selectedConversationKey) {
return null;
}
const ongoingCallUsername = ongoingCallProps?.profileName || ongoingCallProps?.name;
const avatarPath = ongoingCallPubkey
? getConversationController()
.get(ongoingCallPubkey)
.getAvatarPath()
: undefined;
return (
<Draggable
@ -154,12 +135,13 @@ export const DraggableCallContainer = () => {
>
<DraggableCallWindow className="dragHandle">
<DraggableCallWindowInner>
<VideoLoadingSpinner fullWidth={true} />
<StyledDraggableVideoElement
ref={videoRefRemote}
autoPlay={true}
isRemoteVideoMuted={isRemoteVideoMuted}
isVideoMuted={remoteStreamVideoIsMuted}
/>
{isRemoteVideoMuted && (
{remoteStreamVideoIsMuted && (
<CenteredAvatarInDraggable>
<Avatar
size={AvatarSize.XL}

View File

@ -1,63 +1,47 @@
import React, { useEffect, useRef, useState } from 'react';
import React, { useRef } from 'react';
import { useSelector } from 'react-redux';
// tslint:disable-next-line: no-submodule-imports
import useMountedState from 'react-use/lib/useMountedState';
import styled from 'styled-components';
import _ from 'underscore';
import { CallManager, ToastUtils } from '../../../session/utils';
import { UserUtils } from '../../../session/utils';
import {
getHasOngoingCall,
getHasOngoingCallWith,
getSelectedConversationKey,
getHasOngoingCallWithFocusedConvo,
getHasOngoingCallWithFocusedConvoIsOffering,
getHasOngoingCallWithFocusedConvosIsConnecting,
getHasOngoingCallWithPubkey,
} from '../../../state/selectors/conversations';
import { SessionIconButton } from '../icon';
import { animation, contextMenu, Item, Menu } from 'react-contexify';
import { InputItem } from '../../../session/utils/CallManager';
import { DropDownAndToggleButton } from '../icon/DropDownAndToggleButton';
import { StyledVideoElement } from './CallContainer';
import { StyledVideoElement } from './DraggableCallContainer';
import { Avatar, AvatarSize } from '../../Avatar';
import { getConversationController } from '../../../session/conversations';
import { useVideoCallEventsListener } from '../../../hooks/useVideoEventListener';
import {
useAvatarPath,
useOurAvatarPath,
useOurConversationUsername,
} from '../../../hooks/useParamSelector';
import { useModuloWithTripleDots } from '../../../hooks/useModuloWithTripleDots';
import { CallWindowControls } from './CallButtons';
import { SessionSpinner } from '../SessionSpinner';
import { DEVICE_DISABLED_DEVICE_ID } from '../../../session/utils/CallManager';
// import { useCallAudioLevel } from '../../../hooks/useCallAudioLevel';
const VideoContainer = styled.div`
height: 100%;
width: 50%;
z-index: 0;
`;
const InConvoCallWindow = styled.div`
padding: 1rem;
display: flex;
height: 50%;
background-color: hsl(0, 0%, 15.7%);
flex-shrink: 0;
min-height: 200px;
flex-shrink: 1;
min-height: 80px;
align-items: center;
`;
const InConvoCallWindowControls = styled.div`
position: absolute;
bottom: 0px;
width: 100%;
height: 100%;
align-items: flex-end;
padding: 10px;
border-radius: 10px;
margin-left: auto;
margin-right: auto;
left: 0;
right: 0;
transition: all 0.25s ease-in-out;
display: flex;
justify-content: center;
opacity: 0;
&:hover {
opacity: 1;
}
flex-grow: 1;
`;
const RelativeCallWindow = styled.div`
@ -67,65 +51,10 @@ const RelativeCallWindow = styled.div`
flex-grow: 1;
`;
const VideoInputMenu = ({
triggerId,
camerasList,
onUnmute,
}: {
triggerId: string;
onUnmute: () => void;
camerasList: Array<InputItem>;
}) => {
return (
<Menu id={triggerId} animation={animation.fade}>
{camerasList.map(m => {
return (
<Item
key={m.deviceId}
onClick={() => {
onUnmute();
void CallManager.selectCameraByDeviceId(m.deviceId);
}}
>
{m.label.substr(0, 40)}
</Item>
);
})}
</Menu>
);
};
const AudioInputMenu = ({
triggerId,
audioInputsList,
onUnmute,
}: {
triggerId: string;
audioInputsList: Array<InputItem>;
onUnmute: () => void;
}) => {
return (
<Menu id={triggerId} animation={animation.fade}>
{audioInputsList.map(m => {
return (
<Item
key={m.deviceId}
onClick={() => {
onUnmute();
void CallManager.selectAudioInputByDeviceId(m.deviceId);
}}
>
{m.label.substr(0, 40)}
</Item>
);
})}
</Menu>
);
};
const CenteredAvatarInConversation = styled.div`
position: absolute;
top: 0;
top: -50%;
transform: translateY(-50%);
position: relative;
bottom: 0;
left: 0;
right: 50%;
@ -135,141 +64,130 @@ const CenteredAvatarInConversation = styled.div`
align-items: center;
`;
const StyledCenteredLabel = styled.div`
position: absolute;
left: 50%;
transform: translateX(-50%);
height: min-content;
white-space: nowrap;
color: white;
text-shadow: 0px 0px 8px white;
`;
const RingingLabel = () => {
const ongoingCallWithFocusedIsRinging = useSelector(getHasOngoingCallWithFocusedConvoIsOffering);
const modulatedStr = useModuloWithTripleDots(window.i18n('ringing'), 3, 1000);
if (!ongoingCallWithFocusedIsRinging) {
return null;
}
return <StyledCenteredLabel>{modulatedStr}</StyledCenteredLabel>;
};
const ConnectingLabel = () => {
const ongoingCallWithFocusedIsConnecting = useSelector(
getHasOngoingCallWithFocusedConvosIsConnecting
);
const modulatedStr = useModuloWithTripleDots(window.i18n('establishingConnection'), 3, 1000);
if (!ongoingCallWithFocusedIsConnecting) {
return null;
}
return <StyledCenteredLabel>{modulatedStr}</StyledCenteredLabel>;
};
const StyledSpinner = styled.div<{ fullWidth: boolean }>`
height: 100%;
width: ${props => (props.fullWidth ? '100%' : '50%')};
display: flex;
justify-content: center;
align-items: center;
position: absolute;
z-index: -1;
`;
export const VideoLoadingSpinner = (props: { fullWidth: boolean }) => {
return (
<StyledSpinner fullWidth={props.fullWidth}>
<SessionSpinner loading={true} />
</StyledSpinner>
);
};
// tslint:disable-next-line: max-func-body-length
export const InConversationCallContainer = () => {
const ongoingCallProps = useSelector(getHasOngoingCallWith);
const selectedConversationKey = useSelector(getSelectedConversationKey);
const hasOngoingCall = useSelector(getHasOngoingCall);
const [currentConnectedCameras, setCurrentConnectedCameras] = useState<Array<InputItem>>([]);
const [currentConnectedAudioInputs, setCurrentConnectedAudioInputs] = useState<Array<InputItem>>(
[]
);
const ongoingCallPubkey = ongoingCallProps?.id;
const ongoingCallPubkey = useSelector(getHasOngoingCallWithPubkey);
const ongoingCallWithFocused = useSelector(getHasOngoingCallWithFocusedConvo);
const ongoingCallUsername = ongoingCallProps?.profileName || ongoingCallProps?.name;
const videoRefRemote = useRef<any>();
const videoRefLocal = useRef<any>();
const mountedState = useMountedState();
const videoRefRemote = useRef<HTMLVideoElement>(null);
const videoRefLocal = useRef<HTMLVideoElement>(null);
const [isVideoMuted, setVideoMuted] = useState(true);
const [isRemoteVideoMuted, setIsRemoteVideoMuted] = useState(true);
const [isAudioMuted, setAudioMuted] = useState(false);
const ourPubkey = UserUtils.getOurPubKeyStrFromCache();
const videoTriggerId = 'video-menu-trigger-id';
const audioTriggerId = 'audio-menu-trigger-id';
const remoteAvatarPath = useAvatarPath(ongoingCallPubkey);
const ourAvatarPath = useOurAvatarPath();
const avatarPath = ongoingCallPubkey
? getConversationController()
.get(ongoingCallPubkey)
.getAvatarPath()
: undefined;
const ourUsername = useOurConversationUsername();
useEffect(() => {
if (ongoingCallPubkey === selectedConversationKey) {
CallManager.setVideoEventsListener(
(
localStream: MediaStream | null,
remoteStream: MediaStream | null,
camerasList: Array<InputItem>,
audioInputList: Array<InputItem>,
isRemoteVideoStreamMuted: boolean
) => {
if (mountedState() && videoRefRemote?.current && videoRefLocal?.current) {
videoRefLocal.current.srcObject = localStream;
setIsRemoteVideoMuted(isRemoteVideoStreamMuted);
videoRefRemote.current.srcObject = remoteStream;
const {
currentConnectedAudioInputs,
currentConnectedCameras,
currentConnectedAudioOutputs,
currentSelectedAudioOutput,
localStream,
localStreamVideoIsMuted,
remoteStream,
remoteStreamVideoIsMuted,
isAudioMuted,
isAudioOutputMuted,
} = useVideoCallEventsListener('InConversationCallContainer', true);
setCurrentConnectedCameras(camerasList);
setCurrentConnectedAudioInputs(audioInputList);
}
}
);
// const isSpeaking = useCallAudioLevel();
if (videoRefRemote?.current && videoRefLocal?.current) {
if (videoRefRemote.current.srcObject !== remoteStream) {
videoRefRemote.current.srcObject = remoteStream;
}
return () => {
CallManager.setVideoEventsListener(null);
};
}, [ongoingCallPubkey, selectedConversationKey]);
const handleEndCall = async () => {
// call method to end call connection
if (ongoingCallPubkey) {
await CallManager.USER_rejectIncomingCallRequest(ongoingCallPubkey);
}
};
const handleCameraToggle = async () => {
if (!currentConnectedCameras.length) {
ToastUtils.pushNoCameraFound();
return;
}
if (isVideoMuted) {
// select the first one
await CallManager.selectCameraByDeviceId(currentConnectedCameras[0].deviceId);
} else {
await CallManager.selectCameraByDeviceId(CallManager.INPUT_DISABLED_DEVICE_ID);
if (videoRefLocal.current.srcObject !== localStream) {
videoRefLocal.current.srcObject = localStream;
}
setVideoMuted(!isVideoMuted);
};
const handleMicrophoneToggle = async () => {
if (!currentConnectedAudioInputs.length) {
ToastUtils.pushNoAudioInputFound();
return;
}
if (isAudioMuted) {
// select the first one
await CallManager.selectAudioInputByDeviceId(currentConnectedAudioInputs[0].deviceId);
} else {
await CallManager.selectAudioInputByDeviceId(CallManager.INPUT_DISABLED_DEVICE_ID);
if (videoRefRemote.current) {
if (currentSelectedAudioOutput === DEVICE_DISABLED_DEVICE_ID) {
videoRefLocal.current.muted = true;
} else {
void videoRefLocal.current.setSinkId(currentSelectedAudioOutput);
videoRefLocal.current.muted = false;
}
}
}
setAudioMuted(!isAudioMuted);
};
const showAudioInputMenu = (e: React.MouseEvent<HTMLDivElement>) => {
if (currentConnectedAudioInputs.length === 0) {
ToastUtils.pushNoAudioInputFound();
return;
}
contextMenu.show({
id: audioTriggerId,
event: e,
});
};
const showVideoInputMenu = (e: React.MouseEvent<HTMLDivElement>) => {
if (currentConnectedCameras.length === 0) {
ToastUtils.pushNoCameraFound();
return;
}
contextMenu.show({
id: videoTriggerId,
event: e,
});
};
if (!hasOngoingCall || !ongoingCallProps || ongoingCallPubkey !== selectedConversationKey) {
if (!ongoingCallWithFocused) {
return null;
}
return (
<InConvoCallWindow>
<RelativeCallWindow>
<RingingLabel />
<ConnectingLabel />
<VideoContainer>
<VideoLoadingSpinner fullWidth={false} />
<StyledVideoElement
ref={videoRefRemote}
autoPlay={true}
isRemoteVideoMuted={isRemoteVideoMuted}
isVideoMuted={remoteStreamVideoIsMuted}
/>
{isRemoteVideoMuted && (
{remoteStreamVideoIsMuted && (
<CenteredAvatarInConversation>
<Avatar
size={AvatarSize.XL}
avatarPath={avatarPath}
avatarPath={remoteAvatarPath}
name={ongoingCallUsername}
pubkey={ongoingCallPubkey}
/>
@ -281,47 +199,29 @@ export const InConversationCallContainer = () => {
ref={videoRefLocal}
autoPlay={true}
muted={true}
isRemoteVideoMuted={false}
isVideoMuted={localStreamVideoIsMuted}
/>
{localStreamVideoIsMuted && (
<CenteredAvatarInConversation>
<Avatar
size={AvatarSize.XL}
avatarPath={ourAvatarPath}
name={ourUsername}
pubkey={ourPubkey}
/>
</CenteredAvatarInConversation>
)}
</VideoContainer>
<InConvoCallWindowControls>
<SessionIconButton
iconSize={60}
iconPadding="20px"
iconType="hangup"
backgroundColor="white"
borderRadius="50%"
onClick={handleEndCall}
iconColor="red"
margin="10px"
/>
<DropDownAndToggleButton
iconType="camera"
isMuted={isVideoMuted}
onMainButtonClick={handleCameraToggle}
onArrowClick={showVideoInputMenu}
/>
<DropDownAndToggleButton
iconType="microphone"
isMuted={isAudioMuted}
onMainButtonClick={handleMicrophoneToggle}
onArrowClick={showAudioInputMenu}
/>
</InConvoCallWindowControls>
<VideoInputMenu
triggerId={videoTriggerId}
onUnmute={() => {
setVideoMuted(false);
}}
camerasList={currentConnectedCameras}
/>
<AudioInputMenu
triggerId={audioTriggerId}
onUnmute={() => {
setAudioMuted(false);
}}
audioInputsList={currentConnectedAudioInputs}
<CallWindowControls
currentConnectedAudioInputs={currentConnectedAudioInputs}
currentConnectedCameras={currentConnectedCameras}
isAudioMuted={isAudioMuted}
currentConnectedAudioOutputs={currentConnectedAudioOutputs}
isAudioOutputMuted={isAudioOutputMuted}
localStreamVideoIsMuted={localStreamVideoIsMuted}
remoteStreamVideoIsMuted={remoteStreamVideoIsMuted}
isFullScreen={false}
/>
</RelativeCallWindow>
</InConvoCallWindow>
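
For reference, a hedged reconstruction of the shape useVideoCallEventsListener appears to return, inferred only from the destructurings in DraggableCallContainer, CallInFullScreenContainer and this component; the field types are assumptions, not the hook's actual declaration:

// Assumed return shape of useVideoCallEventsListener, inferred from this PR's call sites.
import { InputItem } from '../../../session/utils/CallManager';

type VideoCallEventsState = {
  localStream: MediaStream | null;
  remoteStream: MediaStream | null;
  currentConnectedCameras: Array<InputItem>;
  currentConnectedAudioInputs: Array<InputItem>;
  currentConnectedAudioOutputs: Array<InputItem>;
  currentSelectedAudioOutput: string;
  localStreamVideoIsMuted: boolean;
  remoteStreamVideoIsMuted: boolean;
  isAudioMuted: boolean;
  isAudioOutputMuted: boolean;
};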

View File

@ -1,10 +1,13 @@
import React from 'react';
import React, { useEffect } from 'react';
import { useSelector } from 'react-redux';
import styled from 'styled-components';
import _ from 'underscore';
import { useAvatarPath, useConversationUsername } from '../../../hooks/useParamSelector';
import { ed25519Str } from '../../../session/onions/onionPath';
import { CallManager } from '../../../session/utils';
import { getHasIncomingCall, getHasIncomingCallFrom } from '../../../state/selectors/conversations';
import { Avatar, AvatarSize } from '../../Avatar';
import { SessionButton, SessionButtonColor } from '../SessionButton';
import { SessionWrapperModal } from '../SessionWrapperModal';
@ -21,38 +24,74 @@ export const CallWindow = styled.div`
border: var(--session-border);
`;
// TODO:
/**
* Add mute input, deafen, end call, possibly add person to call
* duration - look at how duration calculated for recording.
*/
const IncomingCallAvatatContainer = styled.div`
padding: 0 0 2rem 0;
`;
const timeoutMs = 60000;
export const IncomingCallDialog = () => {
const hasIncomingCall = useSelector(getHasIncomingCall);
const incomingCallProps = useSelector(getHasIncomingCallFrom);
const incomingCallFromPubkey = useSelector(getHasIncomingCallFrom);
useEffect(() => {
let timeout: NodeJS.Timeout;
if (incomingCallFromPubkey) {
timeout = global.setTimeout(async () => {
if (incomingCallFromPubkey) {
window.log.info(
`call missed with ${ed25519Str(
incomingCallFromPubkey
)} as dialog was not interacted with for ${timeoutMs} ms`
);
await CallManager.USER_rejectIncomingCallRequest(incomingCallFromPubkey);
}
}, timeoutMs);
}
return () => {
if (timeout) {
global.clearTimeout(timeout);
}
};
}, [incomingCallFromPubkey]);
//#region input handlers
const handleAcceptIncomingCall = async () => {
if (incomingCallProps?.id) {
await CallManager.USER_acceptIncomingCallRequest(incomingCallProps.id);
if (incomingCallFromPubkey) {
await CallManager.USER_acceptIncomingCallRequest(incomingCallFromPubkey);
}
};
const handleDeclineIncomingCall = async () => {
// close the modal
if (incomingCallProps?.id) {
await CallManager.USER_rejectIncomingCallRequest(incomingCallProps.id);
if (incomingCallFromPubkey) {
await CallManager.USER_rejectIncomingCallRequest(incomingCallFromPubkey);
}
};
const from = useConversationUsername(incomingCallFromPubkey);
const incomingAvatar = useAvatarPath(incomingCallFromPubkey);
if (!hasIncomingCall) {
return null;
}
if (hasIncomingCall) {
return (
<SessionWrapperModal title={window.i18n('incomingCall')}>
<SessionWrapperModal title={window.i18n('incomingCallFrom', from)}>
<IncomingCallAvatatContainer>
<Avatar
size={AvatarSize.XL}
avatarPath={incomingAvatar}
name={from}
pubkey={incomingCallFromPubkey}
/>
</IncomingCallAvatatContainer>
<div className="session-modal__button-group">
<SessionButton text={window.i18n('decline')} onClick={handleDeclineIncomingCall} />
<SessionButton
text={window.i18n('decline')}
buttonColor={SessionButtonColor.Danger}
onClick={handleDeclineIncomingCall}
/>
<SessionButton
text={window.i18n('accept')}
onClick={handleAcceptIncomingCall}

View File

@ -3,10 +3,10 @@ import React from 'react';
import classNames from 'classnames';
import {
CompositionBox,
SendMessageType,
SessionCompositionBox,
StagedAttachmentType,
} from './SessionCompositionBox';
} from './composition/CompositionBox';
import { Constants } from '../../../session';
import _ from 'lodash';
@ -14,7 +14,6 @@ import { AttachmentUtil, GoogleChrome } from '../../../util';
import { ConversationHeaderWithDetails } from '../../conversation/ConversationHeader';
import { SessionRightPanelWithDetails } from './SessionRightPanel';
import { SessionTheme } from '../../../state/ducks/SessionTheme';
import styled from 'styled-components';
import { SessionMessagesListContainer } from './SessionMessagesListContainer';
import { LightboxGallery, MediaItemType } from '../../LightboxGallery';
@ -34,19 +33,14 @@ import { MessageDetail } from '../../conversation/MessageDetail';
import { getConversationController } from '../../../session/conversations';
import { getPubkeysInPublicConversation } from '../../../data/data';
import autoBind from 'auto-bind';
import { useSelector } from 'react-redux';
import {
getFirstUnreadMessageId,
isFirstUnreadMessageIdAbove,
} from '../../../state/selectors/conversations';
import { SessionButtonColor } from '../SessionButton';
import { updateConfirmModal } from '../../../state/ducks/modalDialog';
import { addStagedAttachmentsInConversation } from '../../../state/ducks/stagedAttachments';
import { InConversationCallContainer } from '../calling/InConversationCallContainer';
import { SplitViewContainer } from '../SplitViewContainer';
// tslint:disable: jsx-curly-spacing
interface State {
showRecordingView: boolean;
isDraggingFile: boolean;
}
export interface LightBoxOptions {
@ -62,6 +56,7 @@ interface Props {
selectedMessages: Array<string>;
showMessageDetails: boolean;
isRightPanelShowing: boolean;
hasOngoingCallWithFocusedConvo: boolean;
// lightbox options
lightBoxOptions?: LightBoxOptions;
@ -69,30 +64,6 @@ interface Props {
stagedAttachments: Array<StagedAttachmentType>;
}
const SessionUnreadAboveIndicator = styled.div`
position: sticky;
top: 0;
margin: 1em;
display: flex;
justify-content: center;
background: var(--color-sent-message-background);
color: var(--color-sent-message-text);
`;
const UnreadAboveIndicator = () => {
const isFirstUnreadAbove = useSelector(isFirstUnreadMessageIdAbove);
const firstUnreadMessageId = useSelector(getFirstUnreadMessageId) as string;
if (!isFirstUnreadAbove) {
return null;
}
return (
<SessionUnreadAboveIndicator key={`above-unread-indicator-${firstUnreadMessageId}`}>
{window.i18n('latestUnreadIsAbove')}
</SessionUnreadAboveIndicator>
);
};
export class SessionConversation extends React.Component<Props, State> {
private readonly messageContainerRef: React.RefObject<HTMLDivElement>;
private dragCounter: number;
@ -103,7 +74,6 @@ export class SessionConversation extends React.Component<Props, State> {
super(props);
this.state = {
showRecordingView: false,
isDraggingFile: false,
};
this.messageContainerRef = React.createRef();
@ -163,7 +133,6 @@ export class SessionConversation extends React.Component<Props, State> {
}
if (newConversationKey !== oldConversationKey) {
this.setState({
showRecordingView: false,
isDraggingFile: false,
});
}
@ -227,7 +196,7 @@ export class SessionConversation extends React.Component<Props, State> {
// ~~~~~~~~~~~~~~ RENDER METHODS ~~~~~~~~~~~~~~
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
public render() {
const { showRecordingView, isDraggingFile } = this.state;
const { isDraggingFile } = this.state;
const {
selectedConversation,
@ -264,20 +233,20 @@ export class SessionConversation extends React.Component<Props, State> {
{lightBoxOptions?.media && this.renderLightBox(lightBoxOptions)}
<div className="conversation-messages">
<InConversationCallContainer />
<UnreadAboveIndicator />
<SplitViewContainer
top={<InConversationCallContainer />}
bottom={
<SessionMessagesListContainer messageContainerRef={this.messageContainerRef} />
}
disableTop={!this.props.hasOngoingCallWithFocusedConvo}
/>
<SessionMessagesListContainer messageContainerRef={this.messageContainerRef} />
{showRecordingView && <div className="conversation-messages__blocking-overlay" />}
{isDraggingFile && <SessionFileDropzone />}
</div>
<SessionCompositionBox
<CompositionBox
sendMessage={this.sendMessageFn}
stagedAttachments={this.props.stagedAttachments}
onLoadVoiceNoteView={this.onLoadVoiceNoteView}
onExitVoiceNoteView={this.onExitVoiceNoteView}
onChoseAttachments={this.onChoseAttachments}
/>
</div>
@ -290,35 +259,12 @@ export class SessionConversation extends React.Component<Props, State> {
);
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// ~~~~~~~~~~~~ MICROPHONE METHODS ~~~~~~~~~~~~
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
private onLoadVoiceNoteView() {
this.setState({
showRecordingView: true,
});
window.inboxStore?.dispatch(resetSelectedMessageIds());
}
private onExitVoiceNoteView() {
this.setState({
showRecordingView: false,
});
}
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
// ~~~~~~~~~~~ KEYBOARD NAVIGATION ~~~~~~~~~~~~
// ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
private onKeyDown(event: any) {
const selectionMode = !!this.props.selectedMessages.length;
const recordingMode = this.state.showRecordingView;
if (event.key === 'Escape') {
// EXIT MEDIA VIEW
if (recordingMode) {
// EXIT RECORDING VIEW
}
// EXIT WHAT ELSE?
}
if (event.target.classList.contains('conversation-content')) {
switch (event.key) {
case 'Escape':

View File

@ -0,0 +1,19 @@
// keep this draft state local to avoid a redux state update, which is slow on some machines given our large state
const draftsForConversations: Record<string, string> = {};
export function getDraftForConversation(conversationKey?: string) {
if (!conversationKey || !draftsForConversations[conversationKey]) {
return '';
}
return draftsForConversations[conversationKey] || '';
}
export function updateDraftForConversation({
conversationKey,
draft,
}: {
conversationKey: string;
draft: string;
}) {
draftsForConversations[conversationKey] = draft;
}
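
A hedged usage sketch of the new per-conversation draft store. The call sites are assumptions: the composition box (whose large diff is suppressed above) would presumably save on each change and restore when a conversation is reopened:

// Hedged sketch; module path and call sites are assumptions.
import { getDraftForConversation, updateDraftForConversation } from './DraftForConversation';

const conversationKey = '05...'; // placeholder pubkey

// while typing, keep the draft outside redux to avoid large state updates
updateDraftForConversation({ conversationKey, draft: 'typed but not sent yet' });

// when the conversation is opened again, restore it ('' if nothing was saved)
const restored = getDraftForConversation(conversationKey);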

View File

@ -18,7 +18,7 @@ import { getMessagesBySentAt } from '../../../data/data';
import autoBind from 'auto-bind';
import { ConversationTypeEnum } from '../../../models/conversation';
import { StateType } from '../../../state/reducer';
import { connect } from 'react-redux';
import { connect, useSelector } from 'react-redux';
import {
getFirstUnreadMessageId,
getQuotedMessageToAnimate,
@ -26,13 +26,39 @@ import {
getSelectedConversationKey,
getShowScrollButton,
getSortedMessagesOfSelectedConversation,
isFirstUnreadMessageIdAbove,
} from '../../../state/selectors/conversations';
import { SessionMessagesList } from './SessionMessagesList';
import styled from 'styled-components';
export type SessionMessageListProps = {
messageContainerRef: React.RefObject<HTMLDivElement>;
};
const SessionUnreadAboveIndicator = styled.div`
position: sticky;
top: 0;
margin: 1em;
display: flex;
justify-content: center;
background: var(--color-sent-message-background);
color: var(--color-sent-message-text);
`;
const UnreadAboveIndicator = () => {
const isFirstUnreadAbove = useSelector(isFirstUnreadMessageIdAbove);
const firstUnreadMessageId = useSelector(getFirstUnreadMessageId) as string;
if (!isFirstUnreadAbove) {
return null;
}
return (
<SessionUnreadAboveIndicator key={`above-unread-indicator-${firstUnreadMessageId}`}>
{window.i18n('latestUnreadIsAbove')}
</SessionUnreadAboveIndicator>
);
};
type Props = SessionMessageListProps & {
conversationKey?: string;
messagesProps: Array<SortedMessageModelProps>;
@ -139,6 +165,8 @@ class SessionMessagesListContainerInner extends React.Component<Props> {
onScroll={this.handleScroll}
ref={this.props.messageContainerRef}
>
<UnreadAboveIndicator />
<TypingBubble
pubkey={conversationKey}
conversationType={conversation.type}

View File

@ -10,9 +10,9 @@ import MicRecorder from 'mic-recorder-to-mp3';
import styled from 'styled-components';
interface Props {
onExitVoiceNoteView: any;
onLoadVoiceNoteView: any;
sendVoiceMessage: any;
onExitVoiceNoteView: () => void;
onLoadVoiceNoteView: () => void;
sendVoiceMessage: (audioBlob: Blob) => Promise<void>;
}
interface State {
@ -24,13 +24,10 @@ interface State {
actionHover: boolean;
startTimestamp: number;
nowTimestamp: number;
updateTimerInterval: NodeJS.Timeout;
}
function getTimestamp(asInt = false) {
const timestamp = Date.now() / 1000;
return asInt ? Math.floor(timestamp) : timestamp;
function getTimestamp() {
return Date.now() / 1000;
}
interface StyledFlexWrapperProps {
@ -50,20 +47,16 @@ const StyledFlexWrapper = styled.div<StyledFlexWrapperProps>`
}
`;
class SessionRecordingInner extends React.Component<Props, State> {
private recorder: any;
export class SessionRecording extends React.Component<Props, State> {
private recorder?: any;
private audioBlobMp3?: Blob;
private audioElement?: HTMLAudioElement | null;
private updateTimerInterval?: NodeJS.Timeout;
constructor(props: Props) {
super(props);
autoBind(this);
// Refs
const now = getTimestamp();
const updateTimerInterval = global.setInterval(this.timerUpdate, 500);
this.state = {
recordDuration: 0,
@ -73,7 +66,6 @@ class SessionRecordingInner extends React.Component<Props, State> {
actionHover: false,
startTimestamp: now,
nowTimestamp: now,
updateTimerInterval,
};
}
@ -86,10 +78,13 @@ class SessionRecordingInner extends React.Component<Props, State> {
if (this.props.onLoadVoiceNoteView) {
this.props.onLoadVoiceNoteView();
}
this.updateTimerInterval = global.setInterval(this.timerUpdate, 500);
}
public componentWillUnmount() {
clearInterval(this.state.updateTimerInterval);
if (this.updateTimerInterval) {
clearInterval(this.updateTimerInterval);
}
}
// tslint:disable-next-line: cyclomatic-complexity
@ -276,7 +271,7 @@ class SessionRecordingInner extends React.Component<Props, State> {
return;
}
this.props.sendVoiceMessage(this.audioBlobMp3);
void this.props.sendVoiceMessage(this.audioBlobMp3);
}
private async initiateRecordingStream() {
@ -348,5 +343,3 @@ class SessionRecordingInner extends React.Component<Props, State> {
}
}
}
export const SessionRecording = SessionRecordingInner;

View File

@ -1,7 +1,7 @@
import React from 'react';
import { arrayBufferFromFile } from '../../../types/Attachment';
import { AttachmentUtil, LinkPreviewUtil } from '../../../util';
import { StagedLinkPreviewData } from './SessionCompositionBox';
import { StagedLinkPreviewData } from './composition/CompositionBox';
import { default as insecureNodeFetch } from 'node-fetch';
import { fetchLinkPreviewImage } from '../../../util/linkPreviewFetch';
import { AbortSignal } from 'abort-controller';
@ -107,7 +107,7 @@ export const getPreview = async (
export const SessionStagedLinkPreview = (props: StagedLinkPreviewProps) => {
if (!props.url) {
return <></>;
return null;
}
return (

View File

@ -1,56 +1,52 @@
import React from 'react';
import _, { debounce } from 'lodash';
import { AttachmentType } from '../../../types/Attachment';
import * as MIME from '../../../types/MIME';
import { AttachmentType } from '../../../../types/Attachment';
import * as MIME from '../../../../types/MIME';
import { SessionIconButton } from '../icon';
import { SessionEmojiPanel } from './SessionEmojiPanel';
import { SessionRecording } from './SessionRecording';
import { SessionEmojiPanel } from '../SessionEmojiPanel';
import { SessionRecording } from '../SessionRecording';
import { Constants } from '../../../session';
import { Constants } from '../../../../session';
import { toArray } from 'react-emoji-render';
import { Flex } from '../../basic/Flex';
import { StagedAttachmentList } from '../../conversation/StagedAttachmentList';
import { ToastUtils } from '../../../session/utils';
import { AttachmentUtil } from '../../../util';
import { Flex } from '../../../basic/Flex';
import { StagedAttachmentList } from '../../../conversation/StagedAttachmentList';
import { ToastUtils } from '../../../../session/utils';
import { AttachmentUtil } from '../../../../util';
import {
getPreview,
LINK_PREVIEW_TIMEOUT,
SessionStagedLinkPreview,
} from './SessionStagedLinkPreview';
} from '../SessionStagedLinkPreview';
import { AbortController } from 'abort-controller';
import { SessionQuotedMessageComposition } from './SessionQuotedMessageComposition';
import { SessionQuotedMessageComposition } from '../SessionQuotedMessageComposition';
import { Mention, MentionsInput } from 'react-mentions';
import { CaptionEditor } from '../../CaptionEditor';
import { getConversationController } from '../../../session/conversations';
import { ReduxConversationType } from '../../../state/ducks/conversations';
import { SessionMemberListItem } from '../SessionMemberListItem';
import { CaptionEditor } from '../../../CaptionEditor';
import { getConversationController } from '../../../../session/conversations';
import { ReduxConversationType } from '../../../../state/ducks/conversations';
import { SessionMemberListItem } from '../../SessionMemberListItem';
import autoBind from 'auto-bind';
import { getMediaPermissionsSettings, SessionSettingCategory } from '../settings/SessionSettings';
import { updateConfirmModal } from '../../../state/ducks/modalDialog';
import {
SectionType,
showLeftPaneSection,
showSettingsSection,
} from '../../../state/ducks/section';
import { SessionButtonColor } from '../SessionButton';
import {
createOrUpdateItem,
getItemById,
hasLinkPreviewPopupBeenDisplayed,
} from '../../../data/data';
import { getMediaPermissionsSettings } from '../../settings/SessionSettings';
import {
getIsTypingEnabled,
getMentionsInput,
getQuotedMessage,
getSelectedConversation,
getSelectedConversationKey,
} from '../../../state/selectors/conversations';
} from '../../../../state/selectors/conversations';
import { connect } from 'react-redux';
import { StateType } from '../../../state/reducer';
import { getTheme } from '../../../state/selectors/theme';
import { removeAllStagedAttachmentsInConversation } from '../../../state/ducks/stagedAttachments';
import { StateType } from '../../../../state/reducer';
import { getTheme } from '../../../../state/selectors/theme';
import { removeAllStagedAttachmentsInConversation } from '../../../../state/ducks/stagedAttachments';
import { getDraftForConversation, updateDraftForConversation } from '../SessionConversationDrafts';
import { showLinkSharingConfirmationModalDialog } from '../../../../interactions/conversationInteractions';
import {
AddStagedAttachmentButton,
SendMessageButton,
StartRecordingButton,
ToggleEmojiButton,
} from './CompositionButtons';
export interface ReplyingToMessageProps {
convoId: string;
@ -83,79 +79,11 @@ export type SendMessageType = {
groupInvitation: { url: string | undefined; name: string } | undefined;
};
const AddStagedAttachmentButton = (props: { onClick: () => void }) => {
return (
<SessionIconButton
iconType="plusThin"
backgroundColor={'var(--color-compose-view-button-background)'}
iconSize={'huge2'}
borderRadius="300px"
iconPadding="8px"
onClick={props.onClick}
/>
);
};
const StartRecordingButton = (props: { onClick: () => void }) => {
return (
<SessionIconButton
iconType="microphone"
iconSize={'huge2'}
backgroundColor={'var(--color-compose-view-button-background)'}
borderRadius="300px"
iconPadding="6px"
onClick={props.onClick}
/>
);
};
const ToggleEmojiButton = React.forwardRef<HTMLDivElement, { onClick: () => void }>(
(props, ref) => {
return (
<SessionIconButton
iconType="emoji"
ref={ref}
backgroundColor="var(--color-compose-view-button-background)"
iconSize={'huge2'}
borderRadius="300px"
iconPadding="6px"
onClick={props.onClick}
/>
);
}
);
const SendMessageButton = (props: { onClick: () => void }) => {
return (
<div className="send-message-button">
<SessionIconButton
iconType="send"
backgroundColor={'var(--color-compose-view-button-background)'}
iconSize={'huge2'}
iconRotation={90}
borderRadius="300px"
iconPadding="6px"
onClick={props.onClick}
/>
</div>
);
};
// keep this draft state local to not have to do a redux state update (a bit slow with our large state for some computers)
const draftsForConversations: Array<{ conversationKey: string; draft: string }> = new Array();
function updateDraftForConversation(action: { conversationKey: string; draft: string }) {
const { conversationKey, draft } = action;
const foundAtIndex = draftsForConversations.findIndex(c => c.conversationKey === conversationKey);
foundAtIndex === -1
? draftsForConversations.push({ conversationKey, draft })
: (draftsForConversations[foundAtIndex] = action);
}
interface Props {
sendMessage: (msg: SendMessageType) => void;
onLoadVoiceNoteView: any;
onExitVoiceNoteView: any;
selectedConversationKey: string;
selectedConversation: ReduxConversationType | undefined;
typingEnabled: boolean;
quotedMessageProps?: ReplyingToMessageProps;
stagedAttachments: Array<StagedAttachmentType>;
onChoseAttachments: (newAttachments: Array<File>) => void;
@ -165,8 +93,7 @@ interface State {
showRecordingView: boolean;
draft: string;
showEmojiPanel: boolean;
voiceRecording?: Blob;
ignoredLink?: string; // set the the ignored url when users closed the link preview
ignoredLink?: string; // set the ignored url when users closed the link preview
stagedLinkPreview?: StagedLinkPreviewData;
showCaptionEditor?: AttachmentType;
}
@ -191,12 +118,10 @@ const sendMessageStyle = {
minHeight: '24px',
width: '100%',
};
const getDefaultState = (newConvoId?: string) => {
return {
draft:
(newConvoId && draftsForConversations.find(c => c.conversationKey === newConvoId)?.draft) ||
'',
voiceRecording: undefined,
draft: getDraftForConversation(newConvoId),
showRecordingView: false,
showEmojiPanel: false,
ignoredLink: undefined,
@ -205,14 +130,84 @@ const getDefaultState = (newConvoId?: string) => {
};
};
class SessionCompositionBoxInner extends React.Component<Props, State> {
function parseEmojis(value: string) {
const emojisArray = toArray(value);
// toArray outputs React elements for emojis and strings for other
return emojisArray.reduce((previous: string, current: any) => {
if (typeof current === 'string') {
return previous + current;
}
return previous + (current.props.children as string);
}, '');
}
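
parseEmojis folds the mixed output of react-emoji-render's toArray (plain strings interleaved with React elements whose child is the emoji character) back into a single string; a small illustrative call, with the concrete emoji output only assumed:

// Illustrative only: ':wave: hello' is first split by toArray into
// something like [<element>👋</element>, ' hello'] and then folded back into '👋 hello'.
const plainText = parseEmojis(':wave: hello');
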
const mentionsRegex = /@\uFFD205[0-9a-f]{64}\uFFD7[^\uFFD2]+\uFFD2/gu;
const getSelectionBasedOnMentions = (draft: string, index: number) => {
// we have to get the real selectionStart/end of an index in the mentions box.
// this is kind of a pain as the mentions box has two inputs, one with the real text, and one with the extracted mentions
// the index shown to the user is actually just the visible part of the mention (the part between the ᅲ...ᅭ markers)
const matches = draft.match(mentionsRegex);
let lastMatchStartIndex = 0;
let lastMatchEndIndex = 0;
let lastRealMatchEndIndex = 0;
if (!matches) {
return index;
}
const mapStartToLengthOfMatches = matches.map(match => {
const displayNameStart = match.indexOf('\uFFD7') + 1;
const displayNameEnd = match.lastIndexOf('\uFFD2');
const displayName = match.substring(displayNameStart, displayNameEnd);
const currentMatchStartIndex = draft.indexOf(match) + lastMatchStartIndex;
lastMatchStartIndex = currentMatchStartIndex;
lastMatchEndIndex = currentMatchStartIndex + match.length;
const realLength = displayName.length + 1;
lastRealMatchEndIndex = lastRealMatchEndIndex + realLength;
// the +1 is for the @
return {
length: displayName.length + 1,
lastRealMatchEndIndex,
start: lastMatchStartIndex,
end: lastMatchEndIndex,
};
});
const beforeFirstMatch = index < mapStartToLengthOfMatches[0].start;
if (beforeFirstMatch) {
// those first characters are plain text, so the mentions logic does not come into play
return index;
}
const lastMatchMap = _.last(mapStartToLengthOfMatches);
if (!lastMatchMap) {
return Number.MAX_SAFE_INTEGER;
}
const indexIsAfterEndOfLastMatch = lastMatchMap.lastRealMatchEndIndex <= index;
if (indexIsAfterEndOfLastMatch) {
const lastEnd = lastMatchMap.end;
const diffBetweenEndAndLastRealEnd = index - lastMatchMap.lastRealMatchEndIndex;
return lastEnd + diffBetweenEndAndLastRealEnd - 1;
}
// now this is the hard part, the cursor is currently between the end of the first match and the start of the last match
// for now, just append it to the end
return Number.MAX_SAFE_INTEGER;
};
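
To make the index mapping concrete, an illustrative call (the draft value and cursor position below are made up); the point is that the returned index accounts for the hidden pubkey and marker characters of each mention:

// Raw draft as stored in state; the user only sees 'hi @Alice ok'.
const rawDraft = `hi @\uFFD205${'a'.repeat(64)}\uFFD7Alice\uFFD2 ok`;
// Cursor index reported against the visible text (somewhere inside 'ok').
const visibleCursor = 11;
// Real index into rawDraft, shifted past the hidden characters, used by onEmojiClick
// to slice the draft at the right place before inserting the emoji colons.
const realCursor = getSelectionBasedOnMentions(rawDraft, visibleCursor);
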
class CompositionBoxInner extends React.Component<Props, State> {
private readonly textarea: React.RefObject<any>;
private readonly fileInput: React.RefObject<HTMLInputElement>;
private readonly emojiPanel: any;
private readonly emojiPanel: React.RefObject<HTMLDivElement>;
private readonly emojiPanelButton: any;
private linkPreviewAbortController?: AbortController;
private container: any;
private readonly mentionsRegex = /@\uFFD205[0-9a-f]{64}\uFFD7[^\uFFD2]+\uFFD2/gu;
private container: HTMLDivElement | null;
private lastBumpTypingMessageLength: number = 0;
constructor(props: any) {
@ -222,6 +217,7 @@ class SessionCompositionBoxInner extends React.Component<Props, State> {
this.textarea = React.createRef();
this.fileInput = React.createRef();
this.container = null;
// Emojis
this.emojiPanel = React.createRef();
this.emojiPanelButton = React.createRef();
@ -286,10 +282,13 @@ class SessionCompositionBoxInner extends React.Component<Props, State> {
this.hideEmojiPanel();
}
private handlePaste(e: any) {
private handlePaste(e: ClipboardEvent) {
if (!e.clipboardData) {
return;
}
const { items } = e.clipboardData;
let imgBlob = null;
for (const item of items) {
for (const item of items as any) {
const pasteType = item.type.split('/')[0];
if (pasteType === 'image') {
imgBlob = item.getAsFile();
@ -300,7 +299,7 @@ class SessionCompositionBoxInner extends React.Component<Props, State> {
imgBlob = item.getAsFile();
break;
case 'text':
void this.showLinkSharingConfirmationModalDialog(e);
void showLinkSharingConfirmationModalDialog(e);
break;
default:
}
@ -315,47 +314,6 @@ class SessionCompositionBoxInner extends React.Component<Props, State> {
}
}
/**
* Check if what is pasted is a URL and prompt confirmation for a setting change
* @param e paste event
*/
private async showLinkSharingConfirmationModalDialog(e: any) {
const pastedText = e.clipboardData.getData('text');
if (this.isURL(pastedText) && !window.getSettingValue('link-preview-setting', false)) {
const alreadyDisplayedPopup =
(await getItemById(hasLinkPreviewPopupBeenDisplayed))?.value || false;
if (!alreadyDisplayedPopup) {
window.inboxStore?.dispatch(
updateConfirmModal({
shouldShowConfirm:
!window.getSettingValue('link-preview-setting') && !alreadyDisplayedPopup,
title: window.i18n('linkPreviewsTitle'),
message: window.i18n('linkPreviewsConfirmMessage'),
okTheme: SessionButtonColor.Danger,
onClickOk: () => {
window.setSettingValue('link-preview-setting', true);
},
onClickClose: async () => {
await createOrUpdateItem({ id: hasLinkPreviewPopupBeenDisplayed, value: true });
},
})
);
}
}
}
/**
*
* @param str String to evaluate
   * @returns true if the string looks like a URL, false otherwise
*/
private isURL(str: string) {
const urlRegex =
'^(?!mailto:)(?:(?:http|https|ftp)://)(?:\\S+(?::\\S*)?@)?(?:(?:(?:[1-9]\\d?|1\\d\\d|2[01]\\d|22[0-3])(?:\\.(?:1?\\d{1,2}|2[0-4]\\d|25[0-5])){2}(?:\\.(?:[0-9]\\d?|1\\d\\d|2[0-4]\\d|25[0-4]))|(?:(?:[a-z\\u00a1-\\uffff0-9]+-?)*[a-z\\u00a1-\\uffff0-9]+)(?:\\.(?:[a-z\\u00a1-\\uffff0-9]+-?)*[a-z\\u00a1-\\uffff0-9]+)*(?:\\.(?:[a-z\\u00a1-\\uffff]{2,})))|localhost)(?::\\d{2,5})?(?:(/|\\?|#)[^\\s]*)?$';
const url = new RegExp(urlRegex, 'i');
return str.length < 2083 && url.test(str);
}
private showEmojiPanel() {
document.addEventListener('mousedown', this.handleClick, false);
@ -390,18 +348,9 @@ class SessionCompositionBoxInner extends React.Component<Props, State> {
);
}
private isTypingEnabled(): boolean {
if (!this.props.selectedConversation) {
return false;
}
const { isBlocked, isKickedFromGroup, left } = this.props.selectedConversation;
return !(isBlocked || isKickedFromGroup || left);
}
private renderCompositionView() {
const { showEmojiPanel } = this.state;
const typingEnabled = this.isTypingEnabled();
const { typingEnabled } = this.props;
return (
<>
@ -463,7 +412,7 @@ class SessionCompositionBoxInner extends React.Component<Props, State> {
: isBlocked && !isPrivate
? i18n('unblockGroupToSend')
: i18n('sendMessage');
const typingEnabled = this.isTypingEnabled();
const { typingEnabled } = this.props;
let index = 0;
return (
@ -478,7 +427,7 @@ class SessionCompositionBoxInner extends React.Component<Props, State> {
disabled={!typingEnabled}
rows={1}
style={sendMessageStyle}
suggestionsPortalHost={this.container}
suggestionsPortalHost={this.container as any}
forceSuggestionsAboveCursor={true} // force mentions to be rendered on top of the cursor, this is working with a fork of react-mentions for now
>
<Mention
@ -585,10 +534,10 @@ class SessionCompositionBoxInner extends React.Component<Props, State> {
callback(mentionsData);
}
private renderStagedLinkPreview(): JSX.Element {
private renderStagedLinkPreview(): JSX.Element | null {
// Don't generate link previews if user has turned them off
if (!(window.getSettingValue('link-preview-setting') || false)) {
return <></>;
return null;
}
const { stagedAttachments, quotedMessageProps } = this.props;
@ -596,7 +545,7 @@ class SessionCompositionBoxInner extends React.Component<Props, State> {
// Don't render link previews if quoted message or attachments are already added
if (stagedAttachments.length !== 0 || quotedMessageProps?.id) {
return <></>;
return null;
}
// we try to match the first link found in the current message
const links = window.Signal.LinkPreviews.findLinks(this.state.draft, undefined);
@ -606,7 +555,7 @@ class SessionCompositionBoxInner extends React.Component<Props, State> {
stagedLinkPreview: undefined,
});
}
return <></>;
return null;
}
const firstLink = links[0];
// if the first link changed, reset the ignored link so that the preview is generated
@ -620,7 +569,7 @@ class SessionCompositionBoxInner extends React.Component<Props, State> {
// if the fetch did not start yet, just don't show anything
if (!this.state.stagedLinkPreview) {
return <></>;
return null;
}
const { isLoaded, title, description, domain, image } = this.state.stagedLinkPreview;
@ -767,7 +716,7 @@ class SessionCompositionBoxInner extends React.Component<Props, State> {
/>
);
}
return <></>;
return null;
}
private renderAttachmentsStaged() {
@ -785,7 +734,7 @@ class SessionCompositionBoxInner extends React.Component<Props, State> {
</>
);
}
return <></>;
return null;
}
private onChooseAttachment() {
@ -838,25 +787,13 @@ class SessionCompositionBoxInner extends React.Component<Props, State> {
}
}
private parseEmojis(value: string) {
const emojisArray = toArray(value);
// toArray outputs React elements for emojis and strings for other
return emojisArray.reduce((previous: string, current: any) => {
if (typeof current === 'string') {
return previous + current;
}
return previous + (current.props.children as string);
}, '');
}
// tslint:disable-next-line: cyclomatic-complexity
private async onSendMessage() {
this.abortLinkPreviewFetch();
// this is dirty but we have to replace all @(xxx) by @xxx manually here
const cleanMentions = (text: string): string => {
const matches = text.match(this.mentionsRegex);
const matches = text.match(mentionsRegex);
let replacedMentions = text;
(matches || []).forEach(match => {
const replacedMention = match.substring(2, match.indexOf('\uFFD7'));
@ -866,7 +803,7 @@ class SessionCompositionBoxInner extends React.Component<Props, State> {
return replacedMentions;
};
const messagePlaintext = cleanMentions(this.parseEmojis(this.state.draft));
const messagePlaintext = cleanMentions(parseEmojis(this.state.draft));
const { selectedConversation } = this.props;
@ -1008,29 +945,18 @@ class SessionCompositionBoxInner extends React.Component<Props, State> {
}
private async onLoadVoiceNoteView() {
// Do stuff for component, then run callback to SessionConversation
const mediaSetting = getMediaPermissionsSettings();
if (mediaSetting) {
this.setState({
showRecordingView: true,
showEmojiPanel: false,
});
this.props.onLoadVoiceNoteView();
if (!getMediaPermissionsSettings()) {
ToastUtils.pushAudioPermissionNeeded();
return;
}
ToastUtils.pushAudioPermissionNeeded(() => {
window.inboxStore?.dispatch(showLeftPaneSection(SectionType.Settings));
window.inboxStore?.dispatch(showSettingsSection(SessionSettingCategory.Privacy));
this.setState({
showRecordingView: true,
showEmojiPanel: false,
});
}
private onExitVoiceNoteView() {
// Do stuff for component, then run callback to SessionConversation
this.setState({ showRecordingView: false });
this.props.onExitVoiceNoteView();
}
private onChange(event: any) {
@ -1039,63 +965,6 @@ class SessionCompositionBoxInner extends React.Component<Props, State> {
updateDraftForConversation({ conversationKey: this.props.selectedConversationKey, draft });
}
private getSelectionBasedOnMentions(index: number) {
// we have to get the real selectionStart/end of an index in the mentions box.
// this is kind of a pain as the mentions box has two inputs, one with the real text, and one with the extracted mentions
// the index shown to the user is actually just the visible part of the mentions (so the part between ᅲ...ᅭ
const matches = this.state.draft.match(this.mentionsRegex);
let lastMatchStartIndex = 0;
let lastMatchEndIndex = 0;
let lastRealMatchEndIndex = 0;
if (!matches) {
return index;
}
const mapStartToLengthOfMatches = matches.map(match => {
const displayNameStart = match.indexOf('\uFFD7') + 1;
const displayNameEnd = match.lastIndexOf('\uFFD2');
const displayName = match.substring(displayNameStart, displayNameEnd);
const currentMatchStartIndex = this.state.draft.indexOf(match) + lastMatchStartIndex;
lastMatchStartIndex = currentMatchStartIndex;
lastMatchEndIndex = currentMatchStartIndex + match.length;
const realLength = displayName.length + 1;
lastRealMatchEndIndex = lastRealMatchEndIndex + realLength;
// the +1 is for the @
return {
length: displayName.length + 1,
lastRealMatchEndIndex,
start: lastMatchStartIndex,
end: lastMatchEndIndex,
};
});
const beforeFirstMatch = index < mapStartToLengthOfMatches[0].start;
if (beforeFirstMatch) {
// those first char are always just char, so the mentions logic does not come into account
return index;
}
const lastMatchMap = _.last(mapStartToLengthOfMatches);
if (!lastMatchMap) {
return Number.MAX_SAFE_INTEGER;
}
const indexIsAfterEndOfLastMatch = lastMatchMap.lastRealMatchEndIndex <= index;
if (indexIsAfterEndOfLastMatch) {
const lastEnd = lastMatchMap.end;
const diffBetweenEndAndLastRealEnd = index - lastMatchMap.lastRealMatchEndIndex;
return lastEnd + diffBetweenEndAndLastRealEnd - 1;
}
// now this is the hard part, the cursor is currently between the end of the first match and the start of the last match
// for now, just append it to the end
return Number.MAX_SAFE_INTEGER;
}
private onEmojiClick({ colons }: { colons: string }) {
const messageBox = this.textarea.current;
if (!messageBox) {
@ -1106,7 +975,7 @@ class SessionCompositionBoxInner extends React.Component<Props, State> {
const currentSelectionStart = Number(messageBox.selectionStart);
const realSelectionStart = this.getSelectionBasedOnMentions(currentSelectionStart);
const realSelectionStart = getSelectionBasedOnMentions(draft, currentSelectionStart);
const before = draft.slice(0, realSelectionStart);
const end = draft.slice(realSelectionStart);
@ -1146,10 +1015,11 @@ const mapStateToProps = (state: StateType) => {
quotedMessageProps: getQuotedMessage(state),
selectedConversation: getSelectedConversation(state),
selectedConversationKey: getSelectedConversationKey(state),
typingEnabled: getIsTypingEnabled(state),
theme: getTheme(state),
};
};
const smart = connect(mapStateToProps);
export const SessionCompositionBox = smart(SessionCompositionBoxInner);
export const CompositionBox = smart(CompositionBoxInner);

View File

@ -0,0 +1,60 @@
import React from 'react';
import { SessionIconButton } from '../../icon';
export const AddStagedAttachmentButton = (props: { onClick: () => void }) => {
return (
<SessionIconButton
iconType="plusThin"
backgroundColor={'var(--color-compose-view-button-background)'}
iconSize={'huge2'}
borderRadius="300px"
iconPadding="8px"
onClick={props.onClick}
/>
);
};
export const StartRecordingButton = (props: { onClick: () => void }) => {
return (
<SessionIconButton
iconType="microphone"
iconSize={'huge2'}
backgroundColor={'var(--color-compose-view-button-background)'}
borderRadius="300px"
iconPadding="6px"
onClick={props.onClick}
/>
);
};
export const ToggleEmojiButton = React.forwardRef<HTMLDivElement, { onClick: () => void }>(
(props, ref) => {
return (
<SessionIconButton
iconType="emoji"
ref={ref}
backgroundColor="var(--color-compose-view-button-background)"
iconSize={'huge2'}
borderRadius="300px"
iconPadding="6px"
onClick={props.onClick}
/>
);
}
);
export const SendMessageButton = (props: { onClick: () => void }) => {
return (
<div className="send-message-button">
<SessionIconButton
iconType="send"
backgroundColor={'var(--color-compose-view-button-background)'}
iconSize={'huge2'}
iconRotation={90}
borderRadius="300px"
iconPadding="6px"
onClick={props.onClick}
/>
</div>
);
};
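
For illustration, a sketch of how these extracted buttons could be composed into a single row; the wrapper component and handler names are assumptions, not part of the change.

import React from 'react';
import {
  AddStagedAttachmentButton,
  SendMessageButton,
  StartRecordingButton,
} from './CompositionButtons';

// Illustrative only: a bare composition row wiring the three buttons to callbacks.
const CompositionRowSketch = (props: {
  onAddAttachment: () => void;
  onStartRecording: () => void;
  onSend: () => void;
}) => (
  <div className="composition-row-sketch">
    <AddStagedAttachmentButton onClick={props.onAddAttachment} />
    <StartRecordingButton onClick={props.onStartRecording} />
    <SendMessageButton onClick={props.onSend} />
  </div>
);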

View File

@ -8,7 +8,7 @@ type SProps = {
onMainButtonClick: (e: React.MouseEvent<HTMLDivElement>) => void;
isMuted?: boolean;
hidePopoverArrow?: boolean;
iconType: 'microphone' | 'camera';
iconType: 'microphone' | 'camera' | 'volume';
};
const StyledRoundedButton = styled.div<{ isMuted: boolean }>`
@ -53,6 +53,12 @@ const CameraIcon = (
</svg>
);
const SpeakerIcon = (
<svg viewBox="0 0 24 24" fill="currentColor">
<path d="M15.536 8.464a5 5 0 010 7.072m2.828-9.9a9 9 0 010 12.728M5.586 15H4a1 1 0 01-1-1v-4a1 1 0 011-1h1.586l4.707-4.707C10.923 3.663 12 4.109 12 5v14c0 .891-1.077 1.337-1.707.707L5.586 15z" />
</svg>
);
const MicrophoneIcon = (
<svg viewBox="0 0 58 58" fill="currentColor">
<path d="M44,28c-0.552,0-1,0.447-1,1v6c0,7.72-6.28,14-14,14s-14-6.28-14-14v-6c0-0.553-0.448-1-1-1s-1,0.447-1,1v6c0,8.485,6.644,15.429,15,15.949V56h-5c-0.552,0-1,0.447-1,1s0.448,1,1,1h12c0.552,0,1-0.447,1-1s-0.448-1-1-1h-5v-5.051c8.356-0.52,15-7.465,15-15.949v-6C45,28.447,44.552,28,44,28zM29,46c6.065,0,11-4.935,11-11V11c0-6.065-4.935-11-11-11S18,4.935,18,11v24C18,41.065,22.935,46,29,46z" />
@ -72,7 +78,7 @@ export const DropDownAndToggleButton = (props: SProps) => {
onMainButtonClick(e);
};
const iconToRender =
iconType === 'microphone' ? MicrophoneIcon : iconType === 'camera' ? CameraIcon : null;
iconType === 'microphone' ? MicrophoneIcon : iconType === 'camera' ? CameraIcon : SpeakerIcon;
return (
<StyledContainer isMuted={isMuted || false}>

File diff suppressed because one or more lines are too long

View File

@ -21,6 +21,7 @@ import { SectionType } from '../../../state/ducks/section';
import { getConversationController } from '../../../session/conversations';
import {
blockConvoById,
callRecipient,
clearNickNameByConvoId,
copyPublicKeyByConvoId,
deleteAllMessagesByConvoIdWithConfirmation,
@ -36,8 +37,7 @@ import {
} from '../../../interactions/conversationInteractions';
import { SessionButtonColor } from '../SessionButton';
import { getTimerOptions } from '../../../state/selectors/timerOptions';
import { CallManager, ToastUtils } from '../../../session/utils';
import { getCallMediaPermissionsSettings } from '../settings/SessionSettings';
import { ToastUtils } from '../../../session/utils';
const maxNumberOfPinnedConversations = 5;
@ -357,34 +357,17 @@ export function getStartCallMenuItem(conversationId: string): JSX.Element | null
const hasIncomingCall = useSelector(getHasIncomingCall);
const hasOngoingCall = useSelector(getHasOngoingCall);
const canCall = !(hasIncomingCall || hasOngoingCall);
if (!convoOut?.isPrivate()) {
if (!convoOut?.isPrivate() || convoOut.isMe()) {
return null;
}
return (
<Item
onClick={async () => {
// TODO: either pass param to callRecipient or call different call methods based on item selected.
// TODO: one time redux-persisted permission modal?
const convo = getConversationController().get(conversationId);
if (!canCall) {
ToastUtils.pushUnableToCall();
return;
}
if (!getCallMediaPermissionsSettings()) {
ToastUtils.pushMicAndCameraPermissionNeeded();
return;
}
if (convo) {
convo.callState = 'connecting';
await convo.commit();
await CallManager.USER_callRecipient(convo.id);
}
onClick={() => {
void callRecipient(conversationId, canCall);
}}
>
{'Video Call'}
{window.i18n('menuCall')}
</Item>
);
}

View File

@ -63,6 +63,7 @@ export async function signUp(signUpDetails: {
id: 'hasSyncedInitialConfigurationItem',
value: true,
});
UserUtils.setSignWithRecoveryPhrase(false);
trigger('openInbox');
} catch (e) {
await resetRegistration();
@ -93,6 +94,8 @@ export async function signInWithRecovery(signInDetails: {
await resetRegistration();
await registerSingleDevice(userRecoveryPhrase, 'english', trimName);
UserUtils.setSignWithRecoveryPhrase(true);
trigger('openInbox');
} catch (e) {
await resetRegistration();
@ -119,6 +122,7 @@ export async function signInWithLinking(signInDetails: { userRecoveryPhrase: str
window.Whisper.events.on('configurationMessageReceived', (displayName: string) => {
window.Whisper.events.off('configurationMessageReceived');
UserUtils.setSignInByLinking(false);
UserUtils.setSignWithRecoveryPhrase(true);
done(displayName);
displayNameFromNetwork = displayName;

View File

@ -64,7 +64,7 @@ const SignInContinueButton = (props: {
handleContinueYourSessionClick: () => any;
}) => {
if (props.signInMode === SignInMode.Default) {
return <></>;
return null;
}
return (
<ContinueYourSessionButton
@ -80,7 +80,7 @@ const SignInButtons = (props: {
onLinkDeviceButtonClicked: () => any;
}) => {
if (props.signInMode !== SignInMode.Default) {
return <></>;
return null;
}
return (
<div>

16
ts/hooks/useModulo.ts Normal file
View File

@ -0,0 +1,16 @@
import React from 'react';
// tslint:disable-next-line: no-submodule-imports
import useInterval from 'react-use/lib/useInterval';
export function useModulo(loopBackAt: number, delay: number) {
const [count, setCount] = React.useState(0);
useInterval(() => {
if (count >= loopBackAt) {
setCount(0);
} else {
setCount(count + 1);
}
}, delay);
return { count };
}

View File

@ -0,0 +1,14 @@
import { useModulo } from './useModulo';
export function useModuloWithTripleDots(
localizedString: string,
loopBackAt: number,
delay: number
) {
const modulo = useModulo(loopBackAt, delay);
if (localizedString.endsWith('...')) {
return localizedString.slice(0, localizedString.length - (loopBackAt - modulo.count));
}
return localizedString;
}
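
For illustration, a minimal sketch of the intended use: animating the trailing dots of a status label (the literal string and interval below are assumptions, not part of the change).

import React from 'react';
import { useModuloWithTripleDots } from './useModuloWithTripleDots';

// Illustrative only: renders 'Ringing', 'Ringing.', 'Ringing..', 'Ringing...' in a loop.
const RingingLabelSketch = () => {
  const label = useModuloWithTripleDots('Ringing...', 3, 500);
  return <div>{label}</div>;
};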

View File

@ -0,0 +1,30 @@
import { useSelector } from 'react-redux';
import { UserUtils } from '../session/utils';
import { StateType } from '../state/reducer';
export function useAvatarPath(pubkey: string | undefined) {
return useSelector((state: StateType) => {
if (!pubkey) {
return undefined;
}
return state.conversations.conversationLookup[pubkey]?.avatarPath;
});
}
export function useOurAvatarPath() {
return useAvatarPath(UserUtils.getOurPubKeyStrFromCache());
}
export function useConversationUsername(pubkey: string | undefined) {
return useSelector((state: StateType) => {
if (!pubkey) {
return undefined;
}
const convo = state.conversations.conversationLookup[pubkey];
return convo?.profileName || convo?.name || convo?.id;
});
}
export function useOurConversationUsername() {
return useConversationUsername(UserUtils.getOurPubKeyStrFromCache());
}
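
For illustration, a sketch of consuming these selector hooks in a small contact row; the component name, markup, and import path are assumptions.

import React from 'react';
import { useAvatarPath, useConversationUsername } from './useParamSelector'; // file name assumed

// Illustrative only: show the cached avatar and display name for a pubkey.
const ContactRowSketch = (props: { pubkey: string }) => {
  const avatarPath = useAvatarPath(props.pubkey);
  const username = useConversationUsername(props.pubkey);

  return (
    <div>
      {avatarPath ? <img src={avatarPath} alt="" /> : null}
      <span>{username || props.pubkey}</span>
    </div>
  );
};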

View File

@ -0,0 +1,90 @@
import { useEffect, useState } from 'react';
import { useSelector } from 'react-redux';
// tslint:disable-next-line: no-submodule-imports
import useMountedState from 'react-use/lib/useMountedState';
import { CallManager } from '../session/utils';
import {
CallManagerOptionsType,
DEVICE_DISABLED_DEVICE_ID,
InputItem,
} from '../session/utils/CallManager';
import {
getCallIsInFullScreen,
getHasOngoingCallWithPubkey,
getSelectedConversationKey,
} from '../state/selectors/conversations';
export function useVideoCallEventsListener(uniqueId: string, onSame: boolean) {
const selectedConversationKey = useSelector(getSelectedConversationKey);
const ongoingCallPubkey = useSelector(getHasOngoingCallWithPubkey);
const isFullScreen = useSelector(getCallIsInFullScreen);
const [localStream, setLocalStream] = useState<MediaStream | null>(null);
const [remoteStream, setRemoteStream] = useState<MediaStream | null>(null);
const [localStreamVideoIsMuted, setLocalStreamVideoIsMuted] = useState(true);
const [ourAudioIsMuted, setOurAudioIsMuted] = useState(false);
const [currentSelectedAudioOutput, setCurrentSelectedAudioOutput] = useState(
DEVICE_DISABLED_DEVICE_ID
);
const [remoteStreamVideoIsMuted, setRemoteStreamVideoIsMuted] = useState(true);
const mountedState = useMountedState();
const [currentConnectedCameras, setCurrentConnectedCameras] = useState<Array<InputItem>>([]);
const [currentConnectedAudioInputs, setCurrentConnectedAudioInputs] = useState<Array<InputItem>>(
[]
);
const [currentConnectedAudioOutputs, setCurrentConnectedAudioOutputs] = useState<
Array<InputItem>
>([]);
useEffect(() => {
if (
(onSame && ongoingCallPubkey === selectedConversationKey) ||
(!onSame && ongoingCallPubkey !== selectedConversationKey)
) {
CallManager.addVideoEventsListener(uniqueId, (options: CallManagerOptionsType) => {
const {
audioInputsList,
audioOutputsList,
camerasList,
isLocalVideoStreamMuted,
isRemoteVideoStreamMuted,
localStream: lLocalStream,
remoteStream: lRemoteStream,
isAudioMuted,
currentSelectedAudioOutput: outputSelected,
} = options;
if (mountedState()) {
setLocalStream(lLocalStream);
setRemoteStream(lRemoteStream);
setRemoteStreamVideoIsMuted(isRemoteVideoStreamMuted);
setLocalStreamVideoIsMuted(isLocalVideoStreamMuted);
setOurAudioIsMuted(isAudioMuted);
setCurrentSelectedAudioOutput(outputSelected);
setCurrentConnectedCameras(camerasList);
setCurrentConnectedAudioInputs(audioInputsList);
setCurrentConnectedAudioOutputs(audioOutputsList);
}
});
}
return () => {
CallManager.removeVideoEventsListener(uniqueId);
};
}, [ongoingCallPubkey, selectedConversationKey, isFullScreen]);
return {
currentConnectedAudioInputs,
currentConnectedAudioOutputs,
currentSelectedAudioOutput,
currentConnectedCameras,
localStreamVideoIsMuted,
remoteStreamVideoIsMuted,
localStream,
remoteStream,
isAudioMuted: ourAudioIsMuted,
isAudioOutputMuted: currentSelectedAudioOutput === DEVICE_DISABLED_DEVICE_ID,
};
}
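
For illustration, a sketch of how a call view could consume this hook and attach the published streams to video elements; the uniqueId, import path, and markup are assumptions, not part of the change.

import React, { useEffect, useRef } from 'react';
import { useVideoCallEventsListener } from './useVideoEventListener'; // file name assumed

// Illustrative only: bind the streams published by the CallManager to <video> elements.
const CallViewSketch = () => {
  const localVideoRef = useRef<HTMLVideoElement>(null);
  const remoteVideoRef = useRef<HTMLVideoElement>(null);

  // 'call-view-sketch' is a made-up uniqueId; onSame=true means "render for the focused conversation".
  const { localStream, remoteStream, remoteStreamVideoIsMuted } = useVideoCallEventsListener(
    'call-view-sketch',
    true
  );

  useEffect(() => {
    if (localVideoRef.current) {
      localVideoRef.current.srcObject = localStream;
    }
    if (remoteVideoRef.current) {
      remoteVideoRef.current.srcObject = remoteStream;
    }
  }, [localStream, remoteStream]);

  return (
    <div>
      <video ref={remoteVideoRef} autoPlay={true} hidden={remoteStreamVideoIsMuted} />
      <video ref={localVideoRef} autoPlay={true} muted={true} />
    </div>
  );
};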

View File

@ -4,7 +4,7 @@ import {
openGroupV2ConversationIdRegex,
} from '../opengroup/utils/OpenGroupUtils';
import { getV2OpenGroupRoom } from '../data/opengroups';
import { SyncUtils, ToastUtils, UserUtils } from '../session/utils';
import { CallManager, SyncUtils, ToastUtils, UserUtils } from '../session/utils';
import { ConversationNotificationSettingType, ConversationTypeEnum } from '../models/conversation';
import _ from 'lodash';
@ -22,7 +22,9 @@ import {
} from '../state/ducks/modalDialog';
import {
createOrUpdateItem,
getItemById,
getMessageById,
hasLinkPreviewPopupBeenDisplayed,
lastAvatarUploadTimestamp,
removeAllMessagesInConversation,
} from '../data/data';
@ -33,6 +35,7 @@ import { FSv2 } from '../fileserver';
import { fromHexToArray, toHex } from '../session/utils/String';
import { SessionButtonColor } from '../components/session/SessionButton';
import { perfEnd, perfStart } from '../session/utils/Performance';
import { getCallMediaPermissionsSettings } from '../components/session/settings/SessionSettings';
export const getCompleteUrlForV2ConvoId = async (convoId: string) => {
if (convoId.match(openGroupV2ConversationIdRegex)) {
@ -388,3 +391,64 @@ export async function replyToMessage(messageId: string) {
window.inboxStore?.dispatch(quoteMessage(undefined));
}
}
/**
* Check if what is pasted is a URL and prompt confirmation for a setting change
* @param e paste event
*/
export async function showLinkSharingConfirmationModalDialog(e: any) {
const pastedText = e.clipboardData.getData('text');
if (isURL(pastedText) && !window.getSettingValue('link-preview-setting', false)) {
const alreadyDisplayedPopup =
(await getItemById(hasLinkPreviewPopupBeenDisplayed))?.value || false;
if (!alreadyDisplayedPopup) {
window.inboxStore?.dispatch(
updateConfirmModal({
shouldShowConfirm:
!window.getSettingValue('link-preview-setting') && !alreadyDisplayedPopup,
title: window.i18n('linkPreviewsTitle'),
message: window.i18n('linkPreviewsConfirmMessage'),
okTheme: SessionButtonColor.Danger,
onClickOk: () => {
window.setSettingValue('link-preview-setting', true);
},
onClickClose: async () => {
await createOrUpdateItem({ id: hasLinkPreviewPopupBeenDisplayed, value: true });
},
})
);
}
}
}
/**
*
* @param str String to evaluate
 * @returns true if the string looks like a URL, false otherwise
*/
function isURL(str: string) {
const urlRegex =
'^(?!mailto:)(?:(?:http|https|ftp)://)(?:\\S+(?::\\S*)?@)?(?:(?:(?:[1-9]\\d?|1\\d\\d|2[01]\\d|22[0-3])(?:\\.(?:1?\\d{1,2}|2[0-4]\\d|25[0-5])){2}(?:\\.(?:[0-9]\\d?|1\\d\\d|2[0-4]\\d|25[0-4]))|(?:(?:[a-z\\u00a1-\\uffff0-9]+-?)*[a-z\\u00a1-\\uffff0-9]+)(?:\\.(?:[a-z\\u00a1-\\uffff0-9]+-?)*[a-z\\u00a1-\\uffff0-9]+)*(?:\\.(?:[a-z\\u00a1-\\uffff]{2,})))|localhost)(?::\\d{2,5})?(?:(/|\\?|#)[^\\s]*)?$';
const url = new RegExp(urlRegex, 'i');
return str.length < 2083 && url.test(str);
}
export async function callRecipient(pubkey: string, canCall: boolean) {
const convo = getConversationController().get(pubkey);
if (!canCall) {
ToastUtils.pushUnableToCall();
return;
}
if (!getCallMediaPermissionsSettings()) {
ToastUtils.pushVideoCallPermissionNeeded();
return;
}
if (convo && convo.isPrivate() && !convo.isMe()) {
convo.callState = 'offering';
await convo.commit();
await CallManager.USER_callRecipient(convo.id);
}
}

View File

@ -44,7 +44,7 @@ import { perfEnd, perfStart } from '../session/utils/Performance';
import {
ReplyingToMessageProps,
SendMessageType,
} from '../components/session/conversation/SessionCompositionBox';
} from '../components/session/conversation/composition/CompositionBox';
import { ed25519Str } from '../session/onions/onionPath';
import { getDecryptedMediaUrl } from '../session/crypto/DecryptedAttachmentsManager';
import { IMAGE_JPEG } from '../types/MIME';
@ -180,8 +180,8 @@ export type CallState = 'offering' | 'incoming' | 'connecting' | 'ongoing' | 'no
export class ConversationModel extends Backbone.Model<ConversationAttributes> {
public updateLastMessage: () => any;
public throttledBumpTyping: any;
public throttledNotify: any;
public throttledBumpTyping: () => void;
public throttledNotify: (message: MessageModel) => void;
public markRead: (newestUnreadDate: number, providedOptions?: any) => Promise<void>;
public initialPromise: any;
@ -192,7 +192,7 @@ export class ConversationModel extends Backbone.Model<ConversationAttributes> {
private typingTimer?: NodeJS.Timeout | null;
private lastReadTimestamp: number;
private pending: any;
private pending?: Promise<any>;
constructor(attributes: ConversationAttributesOptionals) {
super(fillConvoAttributesWithDefaults(attributes));
@ -597,7 +597,7 @@ export class ConversationModel extends Backbone.Model<ConversationAttributes> {
return unreadCount;
}
public queueJob(callback: () => Promise<void>) {
public async queueJob(callback: () => Promise<void>) {
// tslint:disable-next-line: no-promise-as-boolean
const previous = this.pending || Promise.resolve();
@ -606,7 +606,7 @@ export class ConversationModel extends Backbone.Model<ConversationAttributes> {
this.pending = previous.then(taskWithTimeout, taskWithTimeout);
const current = this.pending;
current.then(() => {
void current.then(() => {
if (this.pending === current) {
delete this.pending;
}
@ -874,7 +874,7 @@ export class ConversationModel extends Backbone.Model<ConversationAttributes> {
});
await this.commit();
this.queueJob(async () => {
await this.queueJob(async () => {
await this.sendMessageJob(messageModel, expireTimer);
});
}
@ -1529,6 +1529,36 @@ export class ConversationModel extends Backbone.Model<ConversationAttributes> {
});
}
public async notifyIncomingCall() {
if (!this.isPrivate()) {
window?.log?.info('notifyIncomingCall: not a private convo', this.idForLogging());
return;
}
const conversationId = this.id;
// make sure the notifications are not muted for this convo (and not the source convo)
const convNotif = this.get('triggerNotificationsFor');
if (convNotif === 'disabled') {
window?.log?.info(
'notifyIncomingCall: notifications disabled for convo',
this.idForLogging()
);
return;
}
const now = Date.now();
const iconUrl = await this.getNotificationIcon();
window.Whisper.Notifications.add({
conversationId,
iconUrl,
isExpiringMessage: false,
message: window.i18n('incomingCallFrom', this.getTitle()),
messageSentAt: now,
title: this.getTitle(),
});
}
public async notifyTyping({ isTyping, sender }: any) {
// We don't do anything with typing messages from our other devices
if (UserUtils.isUsFromCache(sender)) {

View File

@ -26,6 +26,13 @@ export async function handleCallMessage(
return;
}
if (type === SignalService.CallMessage.Type.PRE_OFFER) {
await removeFromCache(envelope);
window.log.info('Skipping callMessage PRE_OFFER');
return;
}
if (type === SignalService.CallMessage.Type.OFFER) {
if (Math.max(sentTimestamp - (Date.now() - currentOffset)) > TTL_DEFAULT.CALL_MESSAGE) {
window?.log?.info('Dropping incoming OFFER callMessage sent a while ago: ', sentTimestamp);

View File

@ -626,7 +626,7 @@ export async function handleMessageEvent(event: MessageEvent): Promise<void> {
return;
}
conversation.queueJob(async () => {
void conversation.queueJob(async () => {
if (await isMessageDuplicate(data)) {
window?.log?.info('Received duplicate message. Dropping it.');
confirm();

View File

@ -471,7 +471,7 @@ export async function handleMessageJob(
});
trotthledAllMessagesAddedDispatch();
if (message.get('unread')) {
await conversation.throttledNotify(message);
conversation.throttledNotify(message);
}
if (confirm) {

View File

@ -303,7 +303,7 @@ export async function handleOpenGroupV2Message(
return;
}
conversation.queueJob(async () => {
void conversation.queueJob(async () => {
const isMe = UserUtils.isUsFromCache(sender);
// for an opengroupv2 incoming message the serverTimestamp and the timestamp
const messageCreationData: MessageCreationData = {

View File

@ -8,6 +8,7 @@ interface CallMessageParams extends MessageParams {
sdpMLineIndexes?: Array<number>;
sdpMids?: Array<string>;
sdps?: Array<string>;
uuid: string;
}
export class CallMessage extends ContentMessage {
@ -15,6 +16,7 @@ export class CallMessage extends ContentMessage {
public readonly sdpMLineIndexes?: Array<number>;
public readonly sdpMids?: Array<string>;
public readonly sdps?: Array<string>;
public readonly uuid: string;
constructor(params: CallMessageParams) {
super({ timestamp: params.timestamp, identifier: params.identifier });
@ -22,13 +24,19 @@ export class CallMessage extends ContentMessage {
this.sdpMLineIndexes = params.sdpMLineIndexes;
this.sdpMids = params.sdpMids;
this.sdps = params.sdps;
this.uuid = params.uuid;
// sanity check: every call message other than END_CALL and PRE_OFFER must carry at least one sdp
if (
this.type !== signalservice.CallMessage.Type.END_CALL &&
this.type !== signalservice.CallMessage.Type.PRE_OFFER &&
(!this.sdps || this.sdps.length === 0)
) {
throw new Error('sdps must be set unless this is an END_CALL or PRE_OFFER message');
}
if (this.uuid.length === 0) {
throw new Error('uuid cannot be empty');
}
}
public contentProto(): SignalService.Content {
@ -47,6 +55,7 @@ export class CallMessage extends ContentMessage {
sdpMLineIndexes: this.sdpMLineIndexes,
sdpMids: this.sdpMids,
sdps: this.sdps,
uuid: this.uuid,
});
}
}

View File

@ -12,6 +12,7 @@ import { uploadFileOpenGroupV2 } from '../../opengroup/opengroupV2/OpenGroupAPIV
import { addAttachmentPadding } from '../crypto/BufferPadding';
import { RawPreview, RawQuote } from './Attachments';
import _ from 'lodash';
import { AttachmentsV2Utils } from '.';
interface UploadParamsV2 {
attachment: Attachment;
@ -60,7 +61,7 @@ export async function uploadAttachmentsV2(
openGroup: OpenGroupRequestCommonType
): Promise<Array<AttachmentPointerWithUrl>> {
const promises = (attachments || []).map(async attachment =>
exports.uploadV2({
AttachmentsV2Utils.uploadV2({
attachment,
openGroup,
})
@ -80,7 +81,7 @@ export async function uploadLinkPreviewsV2(
return undefined;
}
const image = await exports.uploadV2({
const image = await AttachmentsV2Utils.uploadV2({
attachment: preview.image,
openGroup,
});
@ -105,10 +106,10 @@ export async function uploadQuoteThumbnailsV2(
const promises = (quote.attachments ?? []).map(async attachment => {
let thumbnail: QuotedAttachment | undefined;
if (attachment.thumbnail) {
thumbnail = await exports.uploadV2({
thumbnail = (await AttachmentsV2Utils.uploadV2({
attachment: attachment.thumbnail,
openGroup,
});
})) as any;
}
return {
...attachment,

View File

@ -1,7 +1,8 @@
import _ from 'lodash';
import { ToastUtils } from '.';
import { MessageUtils, ToastUtils } from '.';
import { getCallMediaPermissionsSettings } from '../../components/session/settings/SessionSettings';
import { getConversationById } from '../../data/data';
import { ConversationModel } from '../../models/conversation';
import { MessageModelType } from '../../models/messageType';
import { SignalService } from '../../protobuf';
import {
@ -9,51 +10,83 @@ import {
callConnected,
endCall,
incomingCall,
openConversationWithMessages,
setFullScreenCall,
startingCallWith,
} from '../../state/ducks/conversations';
import { getConversationController } from '../conversations';
import { CallMessage } from '../messages/outgoing/controlMessage/CallMessage';
import { ed25519Str } from '../onions/onionPath';
import { getMessageQueue } from '../sending';
import { getMessageQueue, MessageSender } from '../sending';
import { PubKey } from '../types';
import { v4 as uuidv4 } from 'uuid';
import { PnServer } from '../../pushnotification';
// import { SoundMeter } from '../../../ts/components/session/calling/SoundMeter';
import { setIsRinging } from './RingingManager';
export type InputItem = { deviceId: string; label: string };
let currentCallUUID: string | undefined;
// const VIDEO_WIDTH = 640;
// const VIDEO_RATIO = 16 / 9;
type CallManagerListener =
| ((
localStream: MediaStream | null,
remoteStream: MediaStream | null,
camerasList: Array<InputItem>,
audioInputsList: Array<InputItem>,
isRemoteVideoStreamMuted: boolean
) => void)
| null;
let videoEventsListener: CallManagerListener;
export type CallManagerOptionsType = {
localStream: MediaStream | null;
remoteStream: MediaStream | null;
camerasList: Array<InputItem>;
audioInputsList: Array<InputItem>;
audioOutputsList: Array<InputItem>;
isLocalVideoStreamMuted: boolean;
isRemoteVideoStreamMuted: boolean;
isAudioMuted: boolean;
currentSelectedAudioOutput: string;
};
function callVideoListener() {
if (videoEventsListener) {
videoEventsListener(
mediaDevices,
remoteStream,
camerasList,
audioInputsList,
remoteVideoStreamIsMuted
);
export type CallManagerListener = ((options: CallManagerOptionsType) => void) | null;
const videoEventsListeners: Array<{ id: string; listener: CallManagerListener }> = [];
function callVideoListeners() {
if (videoEventsListeners.length) {
videoEventsListeners.forEach(item => {
item.listener?.({
localStream: mediaDevices,
remoteStream,
camerasList,
audioInputsList,
audioOutputsList,
isRemoteVideoStreamMuted: remoteVideoStreamIsMuted,
isLocalVideoStreamMuted: selectedCameraId === DEVICE_DISABLED_DEVICE_ID,
isAudioMuted: selectedAudioInputId === DEVICE_DISABLED_DEVICE_ID,
currentSelectedAudioOutput: selectedAudioOutputId,
});
});
}
}
export function setVideoEventsListener(listener: CallManagerListener) {
videoEventsListener = listener;
callVideoListener();
export function addVideoEventsListener(uniqueId: string, listener: CallManagerListener) {
const indexFound = videoEventsListeners.findIndex(m => m.id === uniqueId);
if (indexFound === -1) {
videoEventsListeners.push({ id: uniqueId, listener });
} else {
videoEventsListeners[indexFound].listener = listener;
}
callVideoListeners();
}
export function removeVideoEventsListener(uniqueId: string) {
const indexFound = videoEventsListeners.findIndex(m => m.id === uniqueId);
if (indexFound !== -1) {
videoEventsListeners.splice(indexFound, 1);
}
callVideoListeners();
}
/**
* This field stores all the details received by a sender about a call in separate messages.
 * This field stores all the details received about a specific call with the same uuid. It is a per-pubkey, per-device cache.
*/
const callCache = new Map<string, Array<SignalService.CallMessage>>();
const callCache = new Map<string, Map<string, Array<SignalService.CallMessage>>>();
let peerConnection: RTCPeerConnection | null;
let dataChannel: RTCDataChannel | null;
@ -61,7 +94,7 @@ let remoteStream: MediaStream | null;
let mediaDevices: MediaStream | null;
let remoteVideoStreamIsMuted = true;
export const INPUT_DISABLED_DEVICE_ID = 'off';
export const DEVICE_DISABLED_DEVICE_ID = 'off';
let makingOffer = false;
let ignoreOffer = false;
@ -69,22 +102,26 @@ let isSettingRemoteAnswerPending = false;
let lastOutgoingOfferTimestamp = -Infinity;
const configuration: RTCConfiguration = {
bundlePolicy: 'max-bundle',
rtcpMuxPolicy: 'require',
iceServers: [
{
urls: 'turn:freyr.getsession.org',
username: 'webrtc',
credential: 'webrtc',
username: 'session',
credential: 'session',
},
],
iceTransportPolicy: 'relay',
// iceTransportPolicy: 'relay', // for now, this cause the connection to break after 30-40 sec if we enable this
};
let selectedCameraId: string | undefined;
let selectedAudioInputId: string | undefined;
let selectedCameraId: string = DEVICE_DISABLED_DEVICE_ID;
let selectedAudioInputId: string = DEVICE_DISABLED_DEVICE_ID;
let selectedAudioOutputId: string = DEVICE_DISABLED_DEVICE_ID;
let camerasList: Array<InputItem> = [];
let audioInputsList: Array<InputItem> = [];
let audioOutputsList: Array<InputItem> = [];
async function getConnectedDevices(type: 'videoinput' | 'audioinput') {
async function getConnectedDevices(type: 'videoinput' | 'audioinput' | 'audiooutput') {
const devices = await navigator.mediaDevices.enumerateDevices();
return devices.filter(device => device.kind === type);
}
@ -93,11 +130,12 @@ async function getConnectedDevices(type: 'videoinput' | 'audioinput') {
// tslint:disable-next-line: no-typeof-undefined
if (typeof navigator !== 'undefined') {
navigator.mediaDevices.addEventListener('devicechange', async () => {
await updateInputLists();
callVideoListener();
await updateConnectedDevices();
callVideoListeners();
});
}
async function updateInputLists() {
async function updateConnectedDevices() {
// Get the set of cameras connected
const videoCameras = await getConnectedDevices('videoinput');
@ -112,11 +150,17 @@ async function updateInputLists() {
deviceId: m.deviceId,
label: m.label,
}));
// Get the set of audio outputs connected
const audiosOutput = await getConnectedDevices('audiooutput');
audioOutputsList = audiosOutput.map(m => ({
deviceId: m.deviceId,
label: m.label,
}));
}
function sendVideoStatusViaDataChannel() {
const videoEnabledLocally =
selectedCameraId !== undefined && selectedCameraId !== INPUT_DISABLED_DEVICE_ID;
const videoEnabledLocally = selectedCameraId !== DEVICE_DISABLED_DEVICE_ID;
const stringToSend = JSON.stringify({
video: videoEnabledLocally,
});
@ -125,9 +169,18 @@ function sendVideoStatusViaDataChannel() {
}
}
function sendHangupViaDataChannel() {
const stringToSend = JSON.stringify({
hangup: true,
});
if (dataChannel && dataChannel.readyState === 'open') {
dataChannel?.send(stringToSend);
}
}
export async function selectCameraByDeviceId(cameraDeviceId: string) {
if (cameraDeviceId === INPUT_DISABLED_DEVICE_ID) {
selectedCameraId = cameraDeviceId;
if (cameraDeviceId === DEVICE_DISABLED_DEVICE_ID) {
selectedCameraId = DEVICE_DISABLED_DEVICE_ID;
const sender = peerConnection?.getSenders().find(s => {
return s.track?.kind === 'video';
@ -136,6 +189,7 @@ export async function selectCameraByDeviceId(cameraDeviceId: string) {
sender.track.enabled = false;
}
sendVideoStatusViaDataChannel();
callVideoListeners();
return;
}
if (camerasList.some(m => m.deviceId === cameraDeviceId)) {
@ -164,17 +218,21 @@ export async function selectCameraByDeviceId(cameraDeviceId: string) {
mediaDevices?.removeTrack(t);
});
mediaDevices?.addTrack(videoTrack);
sendVideoStatusViaDataChannel();
callVideoListeners();
} else {
throw new Error('Failed to get sender for selectCameraByDeviceId ');
}
} catch (e) {
window.log.warn('selectCameraByDeviceId failed with', e.message);
callVideoListeners();
}
}
}
export async function selectAudioInputByDeviceId(audioInputDeviceId: string) {
if (audioInputDeviceId === INPUT_DISABLED_DEVICE_ID) {
if (audioInputDeviceId === DEVICE_DISABLED_DEVICE_ID) {
selectedAudioInputId = audioInputDeviceId;
const sender = peerConnection?.getSenders().find(s => {
@ -183,6 +241,7 @@ export async function selectAudioInputByDeviceId(audioInputDeviceId: string) {
if (sender?.track) {
sender.track.enabled = false;
}
callVideoListeners();
return;
}
if (audioInputsList.some(m => m.deviceId === audioInputDeviceId)) {
@ -213,6 +272,25 @@ export async function selectAudioInputByDeviceId(audioInputDeviceId: string) {
} catch (e) {
window.log.warn('selectAudioInputByDeviceId failed with', e.message);
}
callVideoListeners();
}
}
export async function selectAudioOutputByDeviceId(audioOutputDeviceId: string) {
if (audioOutputDeviceId === DEVICE_DISABLED_DEVICE_ID) {
selectedAudioOutputId = audioOutputDeviceId;
console.warn('selectedAudioOutputId', selectedAudioOutputId);
callVideoListeners();
return;
}
if (audioOutputsList.some(m => m.deviceId === audioOutputDeviceId)) {
selectedAudioOutputId = audioOutputDeviceId;
console.warn('selectedAudioOutputId', selectedAudioOutputId);
callVideoListeners();
}
}
@ -229,11 +307,17 @@ async function handleNegotiationNeededEvent(_event: Event, recipient: string) {
}
await peerConnection?.setLocalDescription(offer);
if (!currentCallUUID) {
window.log.warn('cannot send offer without a currentCallUUID');
throw new Error('cannot send offer without a currentCallUUID');
}
if (offer && offer.sdp) {
const offerMessage = new CallMessage({
timestamp: Date.now(),
type: SignalService.CallMessage.Type.OFFER,
sdps: [offer.sdp],
uuid: currentCallUUID,
});
window.log.info('sending OFFER MESSAGE');
@ -262,7 +346,7 @@ function handleIceCandidates(event: RTCPeerConnectionIceEvent, pubkey: string) {
async function openMediaDevicesAndAddTracks() {
try {
await updateInputLists();
await updateConnectedDevices();
if (!camerasList.length) {
ToastUtils.pushNoCameraFound();
return;
@ -272,20 +356,20 @@ async function openMediaDevicesAndAddTracks() {
return;
}
const firstAudio = audioInputsList[0].deviceId;
const firstVideo = camerasList[0].deviceId;
selectedAudioInputId = audioInputsList[0].deviceId;
selectedCameraId = DEVICE_DISABLED_DEVICE_ID;
window.log.info(
`openMediaDevices videoDevice:${firstVideo}:${camerasList[0].label} audioDevice:${firstAudio}`
`openMediaDevices videoDevice:${selectedCameraId}:${camerasList[0].label} audioDevice:${selectedAudioInputId}`
);
const devicesConfig = {
audio: {
deviceId: firstAudio,
deviceId: selectedAudioInputId,
echoCancellation: true,
},
video: {
deviceId: firstVideo,
deviceId: selectedCameraId,
// width: VIDEO_WIDTH,
// height: Math.floor(VIDEO_WIDTH * VIDEO_RATIO),
},
@ -301,26 +385,46 @@ async function openMediaDevicesAndAddTracks() {
}
});
} catch (err) {
ToastUtils.pushMicAndCameraPermissionNeeded();
ToastUtils.pushVideoCallPermissionNeeded();
closeVideoCall();
}
callVideoListener();
callVideoListeners();
}
// tslint:disable-next-line: function-name
export async function USER_callRecipient(recipient: string) {
if (!getCallMediaPermissionsSettings()) {
ToastUtils.pushMicAndCameraPermissionNeeded();
ToastUtils.pushVideoCallPermissionNeeded();
return;
}
await updateInputLists();
if (currentCallUUID) {
window.log.warn(
'USER_callRecipient: looks like we are already in a call (currentCallUUID is set)'
);
return;
}
await updateConnectedDevices();
window?.log?.info(`starting call with ${ed25519Str(recipient)}..`);
window.inboxStore?.dispatch(startingCallWith({ pubkey: recipient }));
if (peerConnection) {
throw new Error('USER_callRecipient peerConnection is already initialized ');
}
peerConnection = createOrGetPeerConnection(recipient, true);
currentCallUUID = uuidv4();
peerConnection = createOrGetPeerConnection(recipient);
// send a pre offer just to wake up the device on the remote side
const preOfferMsg = new CallMessage({
timestamp: Date.now(),
type: SignalService.CallMessage.Type.PRE_OFFER,
uuid: currentCallUUID,
});
window.log.info('Sending preOffer message to ', ed25519Str(recipient));
const rawMessage = await MessageUtils.toRawMessage(PubKey.cast(recipient), preOfferMsg);
const { wrappedEnvelope } = await MessageSender.send(rawMessage);
void PnServer.notifyPnServer(wrappedEnvelope, recipient);
await openMediaDevicesAndAddTracks();
setIsRinging(true);
}
const iceCandidates: Array<RTCIceCandidate> = new Array();
@ -345,12 +449,17 @@ const iceSenderDebouncer = _.debounce(async (recipient: string) => {
return null;
})
);
if (!currentCallUUID) {
window.log.warn('Cannot send ice candidates without a currentCallUUID');
return;
}
const callIceCandicates = new CallMessage({
timestamp: Date.now(),
type: SignalService.CallMessage.Type.ICE_CANDIDATES,
sdpMLineIndexes: validCandidates.map(c => c.sdpMLineIndex),
sdpMids: validCandidates.map(c => c.sdpMid),
sdps: validCandidates.map(c => c.candidate),
uuid: currentCallUUID,
});
window.log.info('sending ICE CANDIDATES MESSAGE to ', recipient);
@ -359,11 +468,15 @@ const iceSenderDebouncer = _.debounce(async (recipient: string) => {
}, 2000);
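handleIceCandidates (its body is elided above) is the feeding side of this 2-second debounce. A hedged sketch of what it plausibly does, consistent with the batching shown here; the exact body is an assumption:
// Hedged sketch: collect each new local ICE candidate and let the debouncer
// above batch-send them once candidates stop arriving for 2 seconds.
function handleIceCandidates(event: RTCPeerConnectionIceEvent, pubkey: string) {
  if (event.candidate) {
    iceCandidates.push(event.candidate);
    void iceSenderDebouncer(pubkey);
  }
}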
const findLastMessageTypeFromSender = (sender: string, msgType: SignalService.CallMessage.Type) => {
const msgCacheFromSender = callCache.get(sender);
if (!msgCacheFromSender) {
const msgCacheFromSenderWithDevices = callCache.get(sender);
if (!msgCacheFromSenderWithDevices) {
return undefined;
}
const lastOfferMessage = _.findLast(msgCacheFromSender, m => m.type === msgType);
// FIXME this does not sort by timestamp as we do not have a timestamp stored in the SignalService.CallMessage object...
const allMsg = _.flattenDeep([...msgCacheFromSenderWithDevices.values()]);
const allMsgFromType = allMsg.filter(m => m.type === msgType);
const lastOfferMessage = _.last(allMsgFromType);
if (!lastOfferMessage) {
return undefined;
@ -383,6 +496,7 @@ function handleConnectionStateChanged(pubkey: string) {
if (peerConnection?.signalingState === 'closed') {
closeVideoCall();
} else if (peerConnection?.connectionState === 'connected') {
setIsRinging(false);
window.inboxStore?.dispatch(callConnected({ pubkey }));
}
}
@ -420,22 +534,42 @@ function closeVideoCall() {
mediaDevices = null;
remoteStream = null;
if (videoEventsListener) {
videoEventsListener(null, null, [], [], true);
}
selectedCameraId = DEVICE_DISABLED_DEVICE_ID;
selectedAudioInputId = DEVICE_DISABLED_DEVICE_ID;
currentCallUUID = undefined;
callVideoListeners();
window.inboxStore?.dispatch(setFullScreenCall(false));
}
function onDataChannelReceivedMessage(ev: MessageEvent<string>) {
try {
const parsed = JSON.parse(ev.data);
if (parsed.hangup !== undefined) {
const foundEntry = getConversationController()
.getConversations()
.find(
(convo: ConversationModel) =>
convo.callState === 'connecting' ||
convo.callState === 'offering' ||
convo.callState === 'ongoing'
);
if (!foundEntry || !foundEntry.id) {
return;
}
handleCallTypeEndCall(foundEntry.id);
return;
}
if (parsed.video !== undefined) {
remoteVideoStreamIsMuted = !Boolean(parsed.video);
}
callVideoListener();
} catch (e) {
window.log.warn('onDataChannelReceivedMessage Could not parse data in event', ev);
}
callVideoListeners();
}
function onDataChannelOnOpen() {
window.log.info('onDataChannelOnOpen: sending video status');
@ -443,44 +577,37 @@ function onDataChannelOnOpen() {
sendVideoStatusViaDataChannel();
}
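The data-channel payloads parsed in onDataChannelReceivedMessage are plain JSON objects carrying either a hangup or a video field. A hedged sketch of the two senders referenced here (sendHangupViaDataChannel, sendVideoStatusViaDataChannel), reusing the module-level dataChannel shown above; deriving the video flag from selectedCameraId is an assumption:
// Hedged sketch of the JSON messages consumed by onDataChannelReceivedMessage.
function sendHangupViaDataChannel() {
  if (dataChannel && dataChannel.readyState === 'open') {
    dataChannel.send(JSON.stringify({ hangup: true }));
  }
}

function sendVideoStatusViaDataChannel() {
  // Assumption: local video is on whenever a real camera (not DEVICE_DISABLED_DEVICE_ID) is selected.
  const videoEnabled = selectedCameraId !== DEVICE_DISABLED_DEVICE_ID;
  if (dataChannel && dataChannel.readyState === 'open') {
    dataChannel.send(JSON.stringify({ video: videoEnabled }));
  }
}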
function createOrGetPeerConnection(withPubkey: string, createDataChannel: boolean) {
function createOrGetPeerConnection(withPubkey: string, isAcceptingCall = false) {
if (peerConnection) {
return peerConnection;
}
remoteStream = new MediaStream();
peerConnection = new RTCPeerConnection(configuration);
dataChannel = peerConnection.createDataChannel('session-datachannel', {
ordered: true,
negotiated: true,
id: 548, // S E S S I O N in ascii code 83*3+69+73+79+78
});
peerConnection.onnegotiationneeded = async (event: Event) => {
await handleNegotiationNeededEvent(event, withPubkey);
};
dataChannel.onmessage = onDataChannelReceivedMessage;
dataChannel.onopen = onDataChannelOnOpen;
peerConnection.ondatachannel = e => {
if (!createDataChannel) {
dataChannel = e.channel;
window.log.info('Got our datachannel setup');
onDataChannelOnOpen();
dataChannel.onmessage = onDataChannelReceivedMessage;
}
};
if (createDataChannel) {
dataChannel = peerConnection.createDataChannel('session-datachannel');
dataChannel.onmessage = onDataChannelReceivedMessage;
dataChannel.onopen = onDataChannelOnOpen;
if (!isAcceptingCall) {
peerConnection.onnegotiationneeded = async (event: Event) => {
await handleNegotiationNeededEvent(event, withPubkey);
};
}
peerConnection.onsignalingstatechange = handleSignalingStateChangeEvent;
peerConnection.ontrack = event => {
event.track.onunmute = () => {
remoteStream?.addTrack(event.track);
callVideoListener();
callVideoListeners();
};
event.track.onmute = () => {
remoteStream?.removeTrack(event.track);
callVideoListener();
callVideoListeners();
};
};
peerConnection.onconnectionstatechange = () => {
@ -496,14 +623,16 @@ function createOrGetPeerConnection(withPubkey: string, createDataChannel: boolea
// tslint:disable-next-line: function-name
export async function USER_acceptIncomingCallRequest(fromSender: string) {
const msgCacheFromSender = callCache.get(fromSender);
await updateInputLists();
if (!msgCacheFromSender) {
window?.log?.info(
'incoming call request cannot be accepted as the corresponding message is not found'
window.log.info('USER_acceptIncomingCallRequest');
setIsRinging(false);
if (currentCallUUID) {
window.log.warn(
'USER_acceptIncomingCallRequest: looks like we are already in a call, as currentCallUUID is not undefined'
);
return;
}
await updateConnectedDevices();
const lastOfferMessage = findLastMessageTypeFromSender(
fromSender,
SignalService.CallMessage.Type.OFFER
@ -515,13 +644,18 @@ export async function USER_acceptIncomingCallRequest(fromSender: string) {
);
return;
}
if (!lastOfferMessage.uuid) {
window?.log?.info('incoming call request cannot be accepted as uuid is invalid');
return;
}
window.inboxStore?.dispatch(answerCall({ pubkey: fromSender }));
await openConversationWithMessages({ conversationKey: fromSender });
if (peerConnection) {
throw new Error('USER_acceptIncomingCallRequest: peerConnection is already set.');
}
currentCallUUID = lastOfferMessage.uuid;
peerConnection = createOrGetPeerConnection(fromSender, false);
peerConnection = createOrGetPeerConnection(fromSender, true);
await openMediaDevicesAndAddTracks();
@ -560,14 +694,21 @@ export async function USER_acceptIncomingCallRequest(fromSender: string) {
// tslint:disable-next-line: function-name
export async function USER_rejectIncomingCallRequest(fromSender: string) {
setIsRinging(false);
const endCallMessage = new CallMessage({
type: SignalService.CallMessage.Type.END_CALL,
timestamp: Date.now(),
uuid: uuidv4(), // just send a random thing, we just want to reject the call
});
callCache.delete(fromSender);
// delete all cached call messages from that sender pubkey, not only the ones for that uuid
window.inboxStore?.dispatch(endCall({ pubkey: fromSender }));
window.log.info('sending END_CALL MESSAGE');
window.inboxStore?.dispatch(
endCall({
pubkey: fromSender,
})
);
window.log.info('USER_rejectIncomingCallRequest');
clearCallCacheFromPubkey(fromSender);
await getMessageQueue().sendToPubKeyNonDurably(PubKey.cast(fromSender), endCallMessage);
@ -581,8 +722,42 @@ export async function USER_rejectIncomingCallRequest(fromSender: string) {
}
}
// tslint:disable-next-line: function-name
export async function USER_hangup(fromSender: string) {
window.log.info('USER_hangup');
if (!currentCallUUID) {
window.log.warn('should not be able to hangup without a currentCallUUID');
return;
} else {
const endCallMessage = new CallMessage({
type: SignalService.CallMessage.Type.END_CALL,
timestamp: Date.now(),
uuid: currentCallUUID,
});
void getMessageQueue().sendToPubKeyNonDurably(PubKey.cast(fromSender), endCallMessage);
}
window.inboxStore?.dispatch(endCall({ pubkey: fromSender }));
window.log.info('sending hangup with an END_CALL MESSAGE');
sendHangupViaDataChannel();
clearCallCacheFromPubkey(fromSender);
const convos = getConversationController().getConversations();
const callingConvos = convos.filter(convo => convo.callState !== undefined);
if (callingConvos.length > 0) {
// the conversation we are hanging up on is the one currently in a call, so tear down the call UI
if (callingConvos.length === 1 && callingConvos[0].id === fromSender) {
closeVideoCall();
}
}
}
export function handleCallTypeEndCall(sender: string) {
callCache.delete(sender);
clearCallCacheFromPubkey(sender);
window.log.info('handling callMessage END_CALL');
const convos = getConversationController().getConversations();
@ -591,9 +766,7 @@ export function handleCallTypeEndCall(sender: string) {
// we just got an end-call event from whoever we are in a call with
if (callingConvos.length === 1 && callingConvos[0].id === sender) {
closeVideoCall();
if (videoEventsListener) {
videoEventsListener(null, null, [], [], true);
}
window.inboxStore?.dispatch(endCall({ pubkey: sender }));
}
}
@ -601,6 +774,11 @@ export function handleCallTypeEndCall(sender: string) {
async function buildAnswerAndSendIt(sender: string) {
if (peerConnection) {
if (!currentCallUUID) {
window.log.warn('cannot send answer without a currentCallUUID');
return;
}
const answer = await peerConnection.createAnswer({
offerToReceiveAudio: true,
offerToReceiveVideo: true,
@ -615,6 +793,7 @@ async function buildAnswerAndSendIt(sender: string) {
timestamp: Date.now(),
type: SignalService.CallMessage.Type.ANSWER,
sdps: [answerSdp],
uuid: currentCallUUID,
});
window.log.info('sending ANSWER MESSAGE');
@ -629,25 +808,28 @@ export async function handleCallTypeOffer(
incomingOfferTimestamp: number
) {
try {
window.log.info('handling callMessage OFFER');
const remoteCallUUID = callMessage.uuid;
if (!remoteCallUUID || remoteCallUUID.length === 0) {
throw new Error('incoming offer call has no valid uuid');
}
window.log.info('handling callMessage OFFER with uuid: ', remoteCallUUID);
const convos = getConversationController().getConversations();
const callingConvos = convos.filter(convo => convo.callState !== undefined);
if (!getCallMediaPermissionsSettings()) {
await handleMissedCall(sender, incomingOfferTimestamp, true);
return;
}
if (callingConvos.length > 0) {
// we just got a new offer from someone we are NOT already in a call with
if (callingConvos.length !== 1 || callingConvos[0].id !== sender) {
await handleMissedCall(sender, incomingOfferTimestamp);
await handleMissedCall(sender, incomingOfferTimestamp, false);
return;
}
}
if (!getCallMediaPermissionsSettings()) {
await handleMissedCall(sender, incomingOfferTimestamp);
// TODO audric show where to turn it on
throw new Error('TODO AUDRIC');
return;
}
const readyForOffer =
!makingOffer && (peerConnection?.signalingState === 'stable' || isSettingRemoteAnswerPending);
const polite = lastOutgoingOfferTimestamp < incomingOfferTimestamp;
@ -671,27 +853,46 @@ export async function handleCallTypeOffer(
await peerConnection.setRemoteDescription(remoteDesc); // SRD rolls back as needed
await buildAnswerAndSendIt(sender);
}
} else {
window.inboxStore?.dispatch(incomingCall({ pubkey: sender }));
// show a notification
const callerConvo = getConversationController().get(sender);
const convNotif = callerConvo?.get('triggerNotificationsFor') || 'disabled';
if (convNotif === 'disabled') {
window?.log?.info('notifications disabled for convo', ed25519Str(sender));
} else if (callerConvo) {
await callerConvo.notifyIncomingCall();
}
setIsRinging(true);
}
// no answer needs to be sent from here; it is sent by buildAnswerAndSendIt above, or once the user accepts the incoming call
pushCallMessageToCallCache(sender, remoteCallUUID, callMessage);
} catch (err) {
window.log?.error(`Error handling offer message ${err}`);
}
if (!callCache.has(sender)) {
callCache.set(sender, new Array());
}
callCache.get(sender)?.push(callMessage);
window.inboxStore?.dispatch(incomingCall({ pubkey: sender }));
}
async function handleMissedCall(sender: string, incomingOfferTimestamp: number) {
export async function handleMissedCall(
sender: string,
incomingOfferTimestamp: number,
isBecauseOfCallPermission: boolean
) {
const incomingCallConversation = await getConversationById(sender);
ToastUtils.pushedMissedCall(
incomingCallConversation?.getNickname() ||
incomingCallConversation?.getProfileName() ||
'Unknown'
);
setIsRinging(false);
if (!isBecauseOfCallPermission) {
ToastUtils.pushedMissedCall(
incomingCallConversation?.getNickname() ||
incomingCallConversation?.getProfileName() ||
'Unknown'
);
} else {
ToastUtils.pushedMissedCallCauseOfPermission(
incomingCallConversation?.getNickname() ||
incomingCallConversation?.getProfileName() ||
'Unknown'
);
}
await incomingCallConversation?.addSingleMessage({
conversationId: incomingCallConversation.id,
@ -704,6 +905,7 @@ async function handleMissedCall(sender: string, incomingOfferTimestamp: number)
unread: 1,
});
incomingCallConversation?.updateLastMessage();
return;
}
@ -712,14 +914,15 @@ export async function handleCallTypeAnswer(sender: string, callMessage: SignalSe
window.log.warn('cannot handle ANSWER message without session description protocols (sdps)');
return;
}
const remoteCallUUID = callMessage.uuid;
if (!remoteCallUUID || remoteCallUUID.length === 0) {
window.log.warn('handleCallTypeAnswer has no valid uuid');
return;
}
window.log.info('handling callMessage ANSWER');
if (!callCache.has(sender)) {
callCache.set(sender, new Array());
}
callCache.get(sender)?.push(callMessage);
pushCallMessageToCallCache(sender, remoteCallUUID, callMessage);
if (!peerConnection) {
window.log.info('handleCallTypeAnswer without peer connection. Dropping');
@ -742,13 +945,14 @@ export async function handleCallTypeIceCandidates(
window.log.warn('cannot handle iceCandidates message without candidates');
return;
}
const remoteCallUUID = callMessage.uuid;
if (!remoteCallUUID || remoteCallUUID.length === 0) {
window.log.warn('handleCallTypeIceCandidates has no valid uuid');
return;
}
window.log.info('handling callMessage ICE_CANDIDATES');
if (!callCache.has(sender)) {
callCache.set(sender, new Array());
}
callCache.get(sender)?.push(callMessage);
pushCallMessageToCallCache(sender, remoteCallUUID, callMessage);
await addIceCandidateToExistingPeerConnection(callMessage);
}
@ -775,5 +979,36 @@ async function addIceCandidateToExistingPeerConnection(callMessage: SignalServic
// tslint:disable-next-line: no-async-without-await
export async function handleOtherCallTypes(sender: string, callMessage: SignalService.CallMessage) {
callCache.get(sender)?.push(callMessage);
const remoteCallUUID = callMessage.uuid;
if (!remoteCallUUID || remoteCallUUID.length === 0) {
window.log.warn('handleOtherCallTypes has no valid uuid');
return;
}
pushCallMessageToCallCache(sender, remoteCallUUID, callMessage);
}
function clearCallCacheFromPubkey(sender: string) {
callCache.delete(sender);
}
function createCallCacheForPubkeyAndUUID(sender: string, uuid: string) {
if (!callCache.has(sender)) {
callCache.set(sender, new Map());
}
if (!callCache.get(sender)?.has(uuid)) {
callCache.get(sender)?.set(uuid, new Array());
}
}
function pushCallMessageToCallCache(
sender: string,
uuid: string,
callMessage: SignalService.CallMessage
) {
createCallCacheForPubkeyAndUUID(sender, uuid);
callCache
.get(sender)
?.get(uuid)
?.push(callMessage);
}
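These three helpers maintain a two-level cache: a Map keyed by sender pubkey, whose values are Maps keyed by call uuid, each holding the ordered CallMessages for that call. A short, hedged usage sketch; the callCache declaration below is an assumption consistent with the helpers above:
// Assumed declaration, matching how createCallCacheForPubkeyAndUUID uses it.
const callCache = new Map<string, Map<string, Array<SignalService.CallMessage>>>();

// Read back every message cached for one sender + call uuid pair.
function getCachedMessages(sender: string, uuid: string): Array<SignalService.CallMessage> {
  return callCache.get(sender)?.get(uuid) || [];
}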

View File

@ -0,0 +1,29 @@
const sound = './fixtures/ringing.mp3';
let currentlyRinging = false;
let ringingAudio: HTMLAudioElement | undefined;
function stopRinging() {
if (ringingAudio) {
ringingAudio.pause();
}
}
function startRinging() {
if (!ringingAudio) {
ringingAudio = new Audio(sound);
ringingAudio.loop = true;
}
void ringingAudio.play();
}
export function setIsRinging(isRinging: boolean) {
if (!currentlyRinging && isRinging) {
startRinging();
currentlyRinging = true;
} else if (currentlyRinging && !isRinging) {
stopRinging();
currentlyRinging = false;
}
}
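setIsRinging is idempotent, so the call sites above (offer received, outgoing call started, accept/reject/missed, connection established) can invoke it without tracking state themselves. A hedged usage sketch:
// Hedged usage sketch: ring while a call is pending, stop on any terminal event.
setIsRinging(true);  // on incoming OFFER or when starting an outgoing call
// ... later, on accept / reject / missed / connected:
setIsRinging(false); // pauses the looping audio; repeated calls are no-ops
Note that stopRinging only pauses the audio element, so a later startRinging resumes the clip from where it stopped rather than from the beginning.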

View File

@ -148,24 +148,42 @@ export function pushedMissedCall(conversationName: string) {
);
}
export function pushMicAndCameraPermissionNeeded() {
pushToastInfo(
'micAndCameraPermissionNeeded',
window.i18n('micAndCameraPermissionNeededTitle'),
window.i18n('micAndCameraPermissionNeeded'),
() => {
window.inboxStore?.dispatch(showLeftPaneSection(SectionType.Settings));
window.inboxStore?.dispatch(showSettingsSection(SessionSettingCategory.Privacy));
}
const openPrivacySettings = () => {
window.inboxStore?.dispatch(showLeftPaneSection(SectionType.Settings));
window.inboxStore?.dispatch(showSettingsSection(SessionSettingCategory.Privacy));
};
export function pushedMissedCallCauseOfPermission(conversationName: string) {
const id = 'missedCallPermission';
toast.info(
<SessionToast
title={window.i18n('callMissedTitle')}
description={window.i18n('callMissedCausePermission', conversationName)}
type={SessionToastType.Info}
onToastClick={openPrivacySettings}
/>,
{ toastId: id, updateId: id, autoClose: 10000 }
);
}
export function pushAudioPermissionNeeded(onClicked: () => void) {
export function pushVideoCallPermissionNeeded() {
pushToastInfo(
'videoCallPermissionNeeded',
window.i18n('cameraPermissionNeededTitle'),
window.i18n('cameraPermissionNeeded'),
openPrivacySettings
);
}
export function pushAudioPermissionNeeded() {
pushToastInfo(
'audioPermissionNeeded',
window.i18n('audioPermissionNeededTitle'),
window.i18n('audioPermissionNeeded'),
onClicked
() => {
window.inboxStore?.dispatch(showLeftPaneSection(SectionType.Settings));
window.inboxStore?.dispatch(showSettingsSection(SessionSettingCategory.Privacy));
}
);
}
@ -248,3 +266,7 @@ export function pushNoCameraFound() {
export function pushNoAudioInputFound() {
pushToastWarning('noAudioInputFound', window.i18n('noAudioInputFound'));
}
export function pushNoAudioOutputFound() {
pushToastWarning('noAudioOutputFound', window.i18n('noAudioOutputFound'));
}

View File

@ -85,6 +85,14 @@ export function setSignInByLinking(isLinking: boolean) {
window.textsecure.storage.user.setSignInByLinking(isLinking);
}
export function isSignWithRecoveryPhrase(): boolean {
return window.textsecure.storage.user.isSignWithRecoveryPhrase();
}
export function setSignWithRecoveryPhrase(isLinking: boolean) {
window.textsecure.storage.user.setSignWithRecoveryPhrase(isLinking);
}
export interface OurLokiProfile {
displayName: string;
avatarPointer: string;

View File

@ -13,7 +13,7 @@ import {
PropsForDataExtractionNotification,
} from '../../models/messageType';
import { LightBoxOptions } from '../../components/session/conversation/SessionConversation';
import { ReplyingToMessageProps } from '../../components/session/conversation/SessionCompositionBox';
import { ReplyingToMessageProps } from '../../components/session/conversation/composition/CompositionBox';
import { QuotedAttachmentType } from '../../components/conversation/Quote';
import { perfEnd, perfStart } from '../../session/utils/Performance';
import { omit } from 'lodash';
@ -277,6 +277,7 @@ export type ConversationsStateType = {
quotedMessage?: ReplyingToMessageProps;
areMoreMessagesBeingFetched: boolean;
haveDoneFirstScroll: boolean;
callIsInFullScreen: boolean;
showScrollButton: boolean;
animateQuotedMessageId?: string;
@ -371,6 +372,7 @@ export function getEmptyConversationState(): ConversationsStateType {
mentionMembers: [],
firstUnreadMessageId: undefined,
haveDoneFirstScroll: false,
callIsInFullScreen: false,
};
}
@ -696,6 +698,8 @@ const conversationsSlice = createSlice({
return {
conversationLookup: state.conversationLookup,
callIsInFullScreen: state.callIsInFullScreen,
selectedConversation: action.payload.id,
areMoreMessagesBeingFetched: false,
messages: action.payload.initialMessages,
@ -850,6 +854,10 @@ const conversationsSlice = createSlice({
void foundConvo.commit();
return state;
},
setFullScreenCall(state: ConversationsStateType, action: PayloadAction<boolean>) {
state.callIsInFullScreen = action.payload;
return state;
},
},
extraReducers: (builder: any) => {
// Add reducers for additional action types here, and handle loading state as needed
@ -915,6 +923,7 @@ export const {
answerCall,
callConnected,
startingCallWith,
setFullScreenCall,
} = actions;
export async function openConversationWithMessages(args: {

View File

@ -1,6 +1,6 @@
import { createSlice, PayloadAction } from '@reduxjs/toolkit';
import _ from 'lodash';
import { StagedAttachmentType } from '../../components/session/conversation/SessionCompositionBox';
import { StagedAttachmentType } from '../../components/session/conversation/composition/CompositionBox';
export type StagedAttachmentsStateType = {
stagedAttachments: { [conversationKey: string]: Array<StagedAttachmentType> };

View File

@ -21,7 +21,7 @@ import {
ConversationHeaderTitleProps,
} from '../../components/conversation/ConversationHeader';
import { LightBoxOptions } from '../../components/session/conversation/SessionConversation';
import { ReplyingToMessageProps } from '../../components/session/conversation/SessionCompositionBox';
import { ReplyingToMessageProps } from '../../components/session/conversation/composition/CompositionBox';
import { getConversationController } from '../../session/conversations';
import { UserUtils } from '../../session/utils';
import { MessageAvatarSelectorProps } from '../../components/conversation/message/MessageAvatar';
@ -49,6 +49,7 @@ export const getConversationLookup = createSelector(
export const getConversationsCount = createSelector(getConversationLookup, (state): number => {
return Object.values(state).length;
});
export const getBlockedPubkeys = createSelector(
// make sure to extend this selector so we rerun on conversation changes
getConversationLookup,
@ -81,9 +82,22 @@ export const getSelectedConversationIsPublic = createSelector(
}
);
const getConversationId = (_whatever: any, id: string) => id;
export const getConversationById = createSelector(
getConversations,
getConversationId,
(
state: ConversationsStateType,
convoId: string | undefined
): ReduxConversationType | undefined => {
return convoId ? state.conversationLookup[convoId] : undefined;
}
);
export const getHasIncomingCallFrom = createSelector(
getConversations,
(state: ConversationsStateType): ReduxConversationType | undefined => {
(state: ConversationsStateType): string | undefined => {
const foundEntry = Object.entries(state.conversationLookup).find(
([_convoKey, convo]) => convo.callState === 'incoming'
);
@ -91,7 +105,7 @@ export const getHasIncomingCallFrom = createSelector(
if (!foundEntry) {
return undefined;
}
return foundEntry[1];
return foundEntry[1].id;
}
);
@ -114,7 +128,7 @@ export const getHasOngoingCallWith = createSelector(
export const getHasIncomingCall = createSelector(
getHasIncomingCallFrom,
(withConvo: ReduxConversationType | undefined): boolean => !!withConvo
(withConvo: string | undefined): boolean => !!withConvo
);
export const getHasOngoingCall = createSelector(
@ -122,6 +136,74 @@ export const getHasOngoingCall = createSelector(
(withConvo: ReduxConversationType | undefined): boolean => !!withConvo
);
export const getHasOngoingCallWithPubkey = createSelector(
getHasOngoingCallWith,
(withConvo: ReduxConversationType | undefined): string | undefined => withConvo?.id
);
export const getHasOngoingCallWithFocusedConvo = createSelector(
getHasOngoingCallWithPubkey,
getSelectedConversationKey,
(withPubkey, selectedPubkey) => {
return withPubkey && withPubkey === selectedPubkey;
}
);
export const getHasOngoingCallWithFocusedConvoIsOffering = createSelector(
getConversations,
getSelectedConversationKey,
(state: ConversationsStateType, selectedConvoPubkey?: string): boolean => {
if (!selectedConvoPubkey) {
return false;
}
const isOffering = state.conversationLookup[selectedConvoPubkey]?.callState === 'offering';
return Boolean(isOffering);
}
);
export const getHasOngoingCallWithFocusedConvosIsConnecting = createSelector(
getConversations,
getSelectedConversationKey,
(state: ConversationsStateType, selectedConvoPubkey?: string): boolean => {
if (!selectedConvoPubkey) {
return false;
}
const isOffering = state.conversationLookup[selectedConvoPubkey]?.callState === 'connecting';
return Boolean(isOffering);
}
);
export const getHasOngoingCallWithNonFocusedConvo = createSelector(
getHasOngoingCallWithPubkey,
getSelectedConversationKey,
(withPubkey, selectedPubkey) => {
return withPubkey && withPubkey !== selectedPubkey;
}
);
export const getCallIsInFullScreen = createSelector(
getConversations,
(state: ConversationsStateType): boolean => state.callIsInFullScreen
);
export const getIsTypingEnabled = createSelector(
getConversations,
getSelectedConversationKey,
(state: ConversationsStateType, selectedConvoPubkey?: string): boolean => {
if (!selectedConvoPubkey) {
return false;
}
const selectedConvo = state.conversationLookup[selectedConvoPubkey];
if (!selectedConvo) {
return false;
}
const { isBlocked, isKickedFromGroup, left } = selectedConvo;
return !(isBlocked || isKickedFromGroup || left);
}
);
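Elsewhere in this PR these selectors drive the call UI from React. A minimal, hedged sketch of how a component might consume them via react-redux; the component name, markup and import path are illustrative, not part of this diff:
import React from 'react';
import { useSelector } from 'react-redux';
import {
  getHasIncomingCall,
  getHasOngoingCallWithFocusedConvo,
} from '../../state/selectors/conversations';

// Hedged sketch: show the incoming-call dialog first, otherwise the in-conversation
// call UI when the focused conversation has an ongoing call.
export const CallUiGate = () => {
  const hasIncomingCall = useSelector(getHasIncomingCall);
  const ongoingWithFocused = useSelector(getHasOngoingCallWithFocusedConvo);

  if (hasIncomingCall) {
    return <div className="incoming-call-dialog" />;
  }
  if (ongoingWithFocused) {
    return <div className="in-conversation-call" />;
  }
  return null;
};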
/**
* Returns true if the current conversation selected is a group conversation.
* Returns false if the current conversation selected is not a group conversation, or none are selected
@ -508,6 +590,20 @@ export const getConversationHeaderProps = createSelector(getSelectedConversation
};
});
export const getIsSelectedPrivate = createSelector(
getConversationHeaderProps,
(headerProps): boolean => {
return headerProps?.isPrivate || false;
}
);
export const getIsSelectedNoteToSelf = createSelector(
getConversationHeaderProps,
(headerProps): boolean => {
return headerProps?.isMe || false;
}
);
export const getNumberOfPinnedConversations = createSelector(getConversations, (state): number => {
const values = Object.values(state.conversationLookup);
return values.filter(conversation => conversation.isPinned).length;

View File

@ -1,5 +1,5 @@
import { createSelector } from 'reselect';
import { StagedAttachmentType } from '../../components/session/conversation/SessionCompositionBox';
import { StagedAttachmentType } from '../../components/session/conversation/composition/CompositionBox';
import { StagedAttachmentsStateType } from '../ducks/stagedAttachments';
import { StateType } from '../reducer';
import { getSelectedConversationKey } from './conversations';

View File

@ -4,6 +4,7 @@ import { SessionConversation } from '../../components/session/conversation/Sessi
import { StateType } from '../reducer';
import { getTheme } from '../selectors/theme';
import {
getHasOngoingCallWithFocusedConvo,
getLightBoxOptions,
getSelectedConversation,
getSelectedConversationKey,
@ -27,6 +28,7 @@ const mapStateToProps = (state: StateType) => {
selectedMessages: getSelectedMessageIds(state),
lightBoxOptions: getLightBoxOptions(state),
stagedAttachments: getStagedAttachmentsForCurrentConversation(state),
hasOngoingCallWithFocusedConvo: getHasOngoingCallWithFocusedConvo(state),
};
};

View File

@ -0,0 +1,80 @@
// tslint:disable: no-console
// tslint:disable no-implicit-dependencies
import { expect } from 'chai';
import { _electron as electron, ElectronApplication, Page } from 'playwright';
const NODE_ENV = 'integration-test';
function throwIfNoFirstInstance(
instanceToCastIfValid: ElectronApplication | null,
pageToCastIfValid: Page | null
): { instance: ElectronApplication; page: Page } {
if (!instanceToCastIfValid) {
throw new Error('no instanceToCastIfValid');
}
if (!pageToCastIfValid) {
throw new Error('no pageToCastIfValid');
}
return { page: pageToCastIfValid, instance: instanceToCastIfValid };
}
async function createAppInstance(MULTI: number) {
// Launch Electron app.
process.env.NODE_ENV = NODE_ENV;
process.env.NODE_APP_INSTANCE = `${MULTI}`;
const instance = await electron.launch({
args: ['main.js'],
});
// Get the first window that the app opens, wait if necessary.
const page = await instance.firstWindow();
// page.on('console', console.log);
return { instance, page };
}
async function killAppInstance(appInstance?: ElectronApplication | null) {
// Kill Electron app.
if (appInstance) {
await appInstance.close();
}
return null;
}
describe('quick test', () => {
let firstAppInstance: ElectronApplication | null = null;
let firstAppPage: Page | null = null;
beforeEach(async () => {
if (firstAppInstance) {
throw new Error('beforeEach: firstAppInstance should not already exist');
}
const { instance, page } = await createAppInstance(1);
firstAppInstance = instance;
firstAppPage = page;
});
afterEach(async () => {
firstAppInstance = await killAppInstance(firstAppInstance);
});
it('check "Begin your Session" is shown on app start', async () => {
const { instance, page } = throwIfNoFirstInstance(firstAppInstance, firstAppPage);
// Evaluation expression in the Electron context.
const appPath = await instance.evaluate(async ({ app }) => {
// This runs in the main Electron process, parameter here is always
// the result of the require('electron') in the main app script.
return app.getAppPath();
});
console.log(appPath);
// Print the title.
const title = await page.title();
const beginSessionSelector = await page.waitForSelector(
'div.session-content-accent-text.title'
);
const contentBeginYourSession = await beginSessionSelector.innerHTML();
expect(contentBeginYourSession).to.equal('Begin your Session.');
expect(title).to.eq('Session');
});
});
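Since createAppInstance already parameterises NODE_APP_INSTANCE, the same helpers could drive a multi-client test. A hedged sketch of what that might look like; the flow is illustrative and not part of this PR:
// Hedged sketch: two isolated app instances launched side by side.
describe('multi instance smoke test (sketch)', () => {
  it('can start two app instances', async () => {
    const first = await createAppInstance(1);
    const second = await createAppInstance(2);
    try {
      expect(await first.page.title()).to.eq('Session');
      expect(await second.page.title()).to.eq('Session');
    } finally {
      await killAppInstance(first.instance);
      await killAppInstance(second.instance);
    }
  });
});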

View File

@ -1,4 +1,4 @@
import { StagedAttachmentType } from '../components/session/conversation/SessionCompositionBox';
import { StagedAttachmentType } from '../components/session/conversation/composition/CompositionBox';
import { SignalService } from '../protobuf';
import { Constants } from '../session';
import loadImage from 'blueimp-load-image';

896
yarn.lock

File diff suppressed because it is too large Load Diff