fix: make sure to include the associatedWith to handle 421

Audric Ackermann 2023-05-26 10:51:02 +10:00
parent 2b3e490ade
commit 7b42c64cf3
18 changed files with 176 additions and 135 deletions

View File

@ -1,7 +1,9 @@
import React from 'react';
import { CSSProperties } from 'styled-components';
export const MessageView = () => {
const noDragStyle = { '-webkit-user-drag': 'none' } as CSSProperties;
export class MessageView extends React.Component {
public render() {
return (
<div className="conversation placeholder">
<div className="conversation-header" />
@ -11,15 +13,16 @@ export class MessageView extends React.Component {
src="images/session/brand.svg"
className="session-brand-logo"
alt="full-brand-logo"
style={noDragStyle}
/>
<img
src="images/session/session-text.svg"
className="session-text-logo"
alt="full-brand-logo"
style={noDragStyle}
/>
</div>
</div>
</div>
);
}
}
};
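Note on the `as CSSProperties` cast above: `-webkit-user-drag` is a non-standard, WebKit-only property that is not in the typed CSS property map, so the object literal cannot be passed to `style` without an assertion. A small standalone sketch of the same trick (illustrative component, not the real MessageView file):

import React from 'react';
import { CSSProperties } from 'styled-components';

// '-webkit-user-drag' is not part of the typed property list, hence the cast
const noDragStyle = { '-webkit-user-drag': 'none' } as CSSProperties;

// illustrative component: the logo can no longer be dragged out of the window
export const UndraggableLogo = () => (
  <img src="images/session/brand.svg" alt="brand-logo" style={noDragStyle} />
);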

View File

@ -40,7 +40,10 @@ import { LeftPaneSectionContainer } from './LeftPaneSectionContainer';
import { SettingsKey } from '../../data/settings-key';
import { getLatestReleaseFromFileServer } from '../../session/apis/file_server_api/FileServerApi';
import { forceRefreshRandomSnodePool } from '../../session/apis/snode_api/snodePool';
import {
forceRefreshRandomSnodePool,
getFreshSwarmFor,
} from '../../session/apis/snode_api/snodePool';
import { isDarkTheme } from '../../state/selectors/theme';
import { ThemeStateType } from '../../themes/constants/colors';
import { switchThemeTo } from '../../themes/switchTheme';
@ -198,6 +201,7 @@ const doAppStartUp = async () => {
void triggerSyncIfNeeded();
void getSwarmPollingInstance().start();
void loadDefaultRooms();
void getFreshSwarmFor(UserUtils.getOurPubKeyStrFromCache()); // refresh our swarm on start to speed up the first message fetching event
// TODOLATER make this a job of the JobRunner
debounce(triggerAvatarReUploadIfNeeded, 200);

View File

@ -104,7 +104,18 @@ async function mergeConfigsWithIncomingUpdates(
`printDumpsForDebugging: before merge of ${variant}:`,
StringUtils.toHex(await GenericWrapperActions.dump(variant))
);
for (let index = 0; index < toMerge.length; index++) {
const element = toMerge[index];
window.log.info(
`printDumpsForDebugging: toMerge of ${index}:${element.hash}: ${StringUtils.toHex(
element.data
)} `,
StringUtils.toHex(await GenericWrapperActions.dump(variant))
);
}
}
const mergedCount = await GenericWrapperActions.merge(variant, toMerge);
const needsPush = await GenericWrapperActions.needsPush(variant);
const needsDump = await GenericWrapperActions.needsDump(variant);
@ -354,8 +365,9 @@ async function handleCommunitiesUpdate() {
});
}
// this call can take quite a long time and should not cause issues to not be awaited
void Promise.all(
// this call can take quite a long time but must be awaited (as it is async and creates the entry in the DB, which is used as a diff)
try {
await Promise.all(
communitiesToJoinInDB.map(async toJoin => {
window.log.info('joining community with convoId ', toJoin.fullUrlWithPubkey);
return getOpenGroupManager().attemptConnectionV2OneAtATime(
@ -365,6 +377,12 @@ async function handleCommunitiesUpdate() {
);
})
);
} catch (e) {
window.log.warn(
`joining community failed with one of ${communitiesToJoinInDB}`,
e.message
);
}
// if the convos already exist, make sure to update the fields if needed
for (let index = 0; index < allCommunitiesInWrapper.length; index++) {
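The switch from `void Promise.all(...)` to an awaited call matters because the loop over `allCommunitiesInWrapper` that follows relies on the conversations created by the join step, while the try/catch keeps one failed join from aborting the whole merge. A minimal self-contained sketch of that pattern (illustrative names, not the real open-group manager):

// sketch: await the joins so later code can rely on the entries they create,
// but do not let a single rejected join abort the whole merge.
async function joinAllCommunities(
  fullUrls: Array<string>,
  join: (fullUrl: string) => Promise<void>
): Promise<void> {
  try {
    // Promise.all rejects as soon as any join rejects; the other joins keep
    // running, but only the first error is reported here.
    await Promise.all(fullUrls.map(async fullUrl => join(fullUrl)));
  } catch (e) {
    console.warn('at least one community join failed:', (e as Error).message);
  }
}

An alternative would be Promise.allSettled, which waits for every join and reports each failure individually; the awaited Promise.all with a try/catch used in this hunk only surfaces the first rejection.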

View File

@ -303,7 +303,7 @@ async function getSnodesFromSeedUrl(urlObj: URL): Promise<Array<any>> {
}
return validNodes;
} catch (e) {
window?.log?.error('Invalid json response');
window?.log?.error('Invalid json response. error:', e.message);
throw new Error(`getSnodesFromSeedUrl: cannot parse content as JSON from ${urlObj.href}`);
}
}

View File

@ -18,7 +18,7 @@ export async function doSnodeBatchRequest(
subRequests: Array<SnodeApiSubRequests>,
targetNode: Snode,
timeout: number,
associatedWith?: string,
associatedWith: string | null,
method: 'batch' | 'sequence' = 'batch'
): Promise<NotEmptyArrayOfBatchResults> {
// console.warn(
@ -49,7 +49,7 @@ export async function doSnodeBatchRequest(
await processOnionRequestErrorAtDestination({
statusCode: resultRow.code,
body: JSON.stringify(resultRow.body),
associatedWith,
associatedWith: associatedWith || undefined,
destinationSnodeEd25519: targetNode.pubkey_ed25519,
});
}
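Tightening `associatedWith?: string` into `associatedWith: string | null` is the core of this fix: an optional parameter can be silently dropped at call sites, which is how 421 handling ended up without a pubkey, whereas a required nullable one forces every caller to state explicitly whether the request is tied to a pubkey. A minimal sketch of the difference (illustrative functions, not the real batch request):

// with an optional parameter, forgetting the argument still compiles:
function requestLoose(timeout: number, associatedWith?: string) {
  return { timeout, associatedWith };
}
requestLoose(4000); // fine: associatedWith is silently undefined

// with a required nullable parameter, every call site has to decide:
function requestStrict(timeout: number, associatedWith: string | null) {
  return { timeout, associatedWith };
}
// requestStrict(4000);      // compile error: expected 2 arguments, got 1
requestStrict(4000, null);   // explicitly "not tied to a pubkey"
requestStrict(4000, '05..'); // explicitly tied to a pubkey, so a 421 can be routed to its swarm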

View File

@ -18,7 +18,7 @@ function getNetworkTimeSubRequests(): Array<NetworkTimeSubRequest> {
// tslint:disable-next-line: variable-name
const getNetworkTime = async (snode: Snode): Promise<string | number> => {
const subRequests = getNetworkTimeSubRequests();
const result = await doSnodeBatchRequest(subRequests, snode, 4000);
const result = await doSnodeBatchRequest(subRequests, snode, 4000, null);
if (!result || !result.length) {
window?.log?.warn(`getNetworkTime on ${snode.ip}:${snode.port} returned falsish value`, result);
throw new Error('getNetworkTime: Invalid result');

View File

@ -32,7 +32,7 @@ function buildSnodeListRequests(): Array<GetServiceNodesSubRequest> {
*/
async function getSnodePoolFromSnode(targetNode: Snode): Promise<Array<Snode>> {
const requests = buildSnodeListRequests();
const results = await doSnodeBatchRequest(requests, targetNode, 4000);
const results = await doSnodeBatchRequest(requests, targetNode, 4000, null);
const firstResult = results[0];

View File

@ -708,7 +708,7 @@ async function handle421InvalidSwarm({
if (parsedBody?.snodes?.length) {
// the snode gave us the new swarm. Save it for the next retry
window?.log?.warn(
'Wrong swarm, now looking at snodes',
`Wrong swarm, now looking for pk ${ed25519Str(associatedWith)} at snodes: `,
parsedBody.snodes.map((s: any) => ed25519Str(s.pubkey_ed25519))
);
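For context on why `associatedWith` has to reach this point: a 421 means the contacted snode is no longer part of the target pubkey's swarm, and the response usually carries that pubkey's current swarm. A rough, hypothetical sketch of the idea (the cache update helper is passed in here because it is illustrative, not the repo API; the real logic lives in `handle421InvalidSwarm`):

// hypothetical sketch of the idea, not the repo implementation
type SnodeInfo = { pubkey_ed25519: string };

async function on421WrongSwarm(
  associatedWith: string,
  parsedBody: { snodes?: Array<SnodeInfo> },
  updateSwarmCache: (pubkey: string, edkeys: Array<string>) => Promise<void>
): Promise<void> {
  if (parsedBody.snodes?.length) {
    // the failing snode returned the pubkey's current swarm: cache it so the
    // next retry targets nodes that actually hold this pubkey's data
    await updateSwarmCache(
      associatedWith,
      parsedBody.snodes.map(s => s.pubkey_ed25519)
    );
  }
  // without associatedWith there is no pubkey to attach the fresh swarm to,
  // which is why the batch/retrieve calls now have to pass it through
}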

View File

@ -41,7 +41,7 @@ async function getSessionIDForOnsName(onsNameCase: string) {
const promises = range(0, validationCount).map(async () => {
const targetNode = await getRandomSnode();
const results = await doSnodeBatchRequest(onsResolveRequests, targetNode, 4000);
const results = await doSnodeBatchRequest(onsResolveRequests, targetNode, 4000, null);
const firstResult = results[0];
if (!firstResult || firstResult.code !== 200 || !firstResult.body) {
throw new Error('ONSresolve:Failed to resolve ONS');

View File

@ -124,7 +124,12 @@ async function retrieveNextMessages(
// let exceptions bubble up
// no retry for this one as this is a call we do every few seconds while polling for messages
const results = await doSnodeBatchRequest(retrieveRequestsParams, targetNode, 4000);
const results = await doSnodeBatchRequest(
retrieveRequestsParams,
targetNode,
4000,
associatedWith
);
if (!results || !results.length) {
window?.log?.warn(

View File

@ -29,7 +29,7 @@ async function doRequest({
url: string;
options: LokiFetchOptions;
targetNode?: Snode;
associatedWith?: string;
associatedWith: string | null;
timeout: number;
}): Promise<undefined | SnodeResponse> {
const method = options.method || 'GET';
@ -52,7 +52,7 @@ async function doRequest({
targetNode,
body: fetchOptions.body,
headers: fetchOptions.headers,
associatedWith,
associatedWith: associatedWith || undefined,
});
if (!fetchResult) {
return undefined;
@ -117,7 +117,7 @@ export async function snodeRpc(
method: string;
params: Record<string, any> | Array<Record<string, any>>;
targetNode: Snode;
associatedWith?: string;
associatedWith: string | null;
timeout?: number;
} //the user pubkey this call is for. if the onion request fails, this is used to handle the error for this user swarm for instance
): Promise<undefined | SnodeResponse> {

View File

@ -313,12 +313,27 @@ export async function getSwarmFor(pubkey: string): Promise<Array<Snode>> {
return goodNodes;
}
// Request new node list from the network and save it
return getSwarmFromNetworkAndSave(pubkey);
}
/**
* Force a request to be made to the network to fetch the swarm of the specified pubkey, and cache the result.
* Note: should not be called directly unless you know what you are doing. Use the cached `getSwarmFor()` function instead
* @param pubkey the pubkey to request the swarm for
* @returns the fresh swarm, shuffled
*/
export async function getFreshSwarmFor(pubkey: string): Promise<Array<Snode>> {
return getSwarmFromNetworkAndSave(pubkey);
}
async function getSwarmFromNetworkAndSave(pubkey: string) {
// Request new node list from the network
const swarm = await requestSnodesForPubkeyFromNetwork(pubkey);
const mixedSwarm = shuffle(swarm);
const shuffledSwarm = shuffle(swarm);
const edkeys = mixedSwarm.map((n: Snode) => n.pubkey_ed25519);
const edkeys = shuffledSwarm.map((n: Snode) => n.pubkey_ed25519);
await internalUpdateSwarmFor(pubkey, edkeys);
return mixedSwarm;
return shuffledSwarm;
}
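In terms of usage, `getSwarmFor` stays the default cached lookup, while the new `getFreshSwarmFor` deliberately bypasses the cache and then refreshes it; the app-startup call added in ActionsPanel is the intended consumer. A short usage sketch, assuming both functions are imported from this snodePool module:

import { getFreshSwarmFor, getSwarmFor } from './snodePool';

async function exampleSwarmLookups(pubkey: string) {
  // normal path: returns the cached swarm when enough nodes are known,
  // otherwise falls back to a network fetch and saves the result
  const swarm = await getSwarmFor(pubkey);

  // deliberate refresh (e.g. our own pubkey at app start) to speed up the first poll
  const freshSwarm = await getFreshSwarmFor(pubkey);

  return { swarm, freshSwarm };
}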

View File

@ -6,27 +6,26 @@ import { PubKey } from '../../types';
import { ERROR_CODE_NO_CONNECT } from './SNodeAPI';
import * as snodePool from './snodePool';
import pRetry from 'p-retry';
import { ConversationModel } from '../../../models/conversation';
import { ConfigMessageHandler } from '../../../receiver/configMessage';
import { decryptEnvelopeWithOurKey } from '../../../receiver/contentMessage';
import { EnvelopePlus } from '../../../receiver/types';
import { updateIsOnline } from '../../../state/ducks/onion';
import { ReleasedFeatures } from '../../../util/releaseFeature';
import {
GenericWrapperActions,
UserGroupsWrapperActions,
} from '../../../webworker/workers/browser/libsession_worker_interface';
import { DURATION, SWARM_POLLING_TIMEOUT } from '../../constants';
import { getConversationController } from '../../conversations';
import { IncomingMessage } from '../../messages/incoming/IncomingMessage';
import { ed25519Str } from '../../onions/onionPath';
import { StringUtils, UserUtils } from '../../utils';
import { perfEnd, perfStart } from '../../utils/Performance';
import { LibSessionUtil } from '../../utils/libsession/libsession_utils';
import { SnodeNamespace, SnodeNamespaces } from './namespaces';
import { SnodeAPIRetrieve } from './retrieveRequest';
import { RetrieveMessageItem, RetrieveMessagesResultsBatched } from './types';
import { ReleasedFeatures } from '../../../util/releaseFeature';
import { LibSessionUtil } from '../../utils/libsession/libsession_utils';
import {
GenericWrapperActions,
UserGroupsWrapperActions,
} from '../../../webworker/workers/browser/libsession_worker_interface';
export function extractWebSocketContent(
message: string,
@ -404,8 +403,6 @@ export class SwarmPolling {
const pkStr = pubkey.key;
try {
return await pRetry(
async () => {
const prevHashes = await Promise.all(
namespaces.map(namespace => this.getLastHash(snodeEdkey, pkStr, namespace))
);
@ -480,17 +477,6 @@ export class SwarmPolling {
);
return results;
},
{
minTimeout: 100,
retries: 1,
onFailedAttempt: e => {
window?.log?.warn(
`retrieveNextMessages attempt #${e.attemptNumber} failed. ${e.retriesLeft} retries left... ${e.name}`
);
},
}
);
} catch (e) {
if (e.message === ERROR_CODE_NO_CONNECT) {
if (window.inboxStore?.getState().onionPaths.isOnline) {
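The removed wrapper used `pRetry` with `retries: 1`, `minTimeout: 100` and an `onFailedAttempt` log, i.e. at most one extra attempt after a short delay; after this change a failed retrieve falls straight through to the catch below and is simply picked up by the next polling tick. A generic, self-contained sketch of the behaviour that was dropped (not the repo code):

// roughly what the removed pRetry options did: one extra attempt after ~100 ms,
// logging the failed attempt; a second failure still propagates to the caller.
async function withOneRetry<T>(fn: () => Promise<T>, minTimeoutMs = 100): Promise<T> {
  try {
    return await fn();
  } catch (e) {
    console.warn('attempt #1 failed, 1 retry left...', (e as Error).message);
    await new Promise(resolve => setTimeout(resolve, minTimeoutMs));
    return fn();
  }
}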

View File

@ -515,6 +515,8 @@ async function leaveClosedGroup(groupId: string, fromSyncMessage: boolean) {
namespace: SnodeNamespaces.ClosedGroupMessage,
pubkey: PubKey.cast(groupId),
});
// TODO: our leaving message might fail to be sent for some specific reason, but we still want to delete the group.
// for instance, if we do not have the encryption keypair anymore, we cannot send our leaving message, but we should still delete its content
if (wasSent) {
window?.log?.info(
`Leaving message sent ${groupId}. Removing everything related to this group.`

View File

@ -312,7 +312,7 @@ export async function testGuardNode(snode: Snode) {
response = await insecureNodeFetch(url, fetchOptions);
} catch (e) {
if (e.type === 'request-timeout') {
window?.log?.warn('test timeout for node,', ed25519Str(snode.pubkey_ed25519));
window?.log?.warn('test :,', ed25519Str(snode.pubkey_ed25519));
}
if (e.code === 'ENETUNREACH') {
window?.log?.warn('no network on node,', snode);

View File

@ -71,6 +71,13 @@ async function updateProfileOfContact(
avatarChanged = true; // allow changes from strings to null/undefined to trigger an AvatarDownloadJob. If that happens, we want to remove the local attachment file.
}
// if we have a local path to a downloaded avatar, but no corresponding url/key for it, it means that
// the avatar was most likely removed so let's remove our link to that file.
if ((!profileUrl || !profileKeyHex) && conversation.get('avatarInProfile')) {
conversation.set({ avatarInProfile: undefined });
changes = true;
}
if (changes) {
await conversation.commit();
}

View File

@ -236,7 +236,7 @@ async function sendMessagesDataToSnode(
signedDeleteOldHashesRequest
);
if (snode) {
if (!isEmpty(storeResults)) {
window?.log?.info(
`sendMessagesToSnode - Successfully stored messages to ${ed25519Str(destination)} via ${
snode.ip

View File

@ -223,6 +223,7 @@ class ConfigurationSyncJob extends PersistedJob<ConfigurationSyncPersistedData>
window.log.info(
`ConfigurationSyncJob: unexpected result length: expected ${expectedReplyLength} but got ${result?.length}`
);
// this might be a 421 error (already handled) so let's retry this request a little bit later
return RunJobResult.RetryJobIfPossible;
}
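The new comment ties this job to the rest of the commit: if the unexpected reply length was caused by a 421, the swarm cache has already been corrected by the time this code runs, so returning `RunJobResult.RetryJobIfPossible` lets the job runner re-run the push against the right swarm instead of failing hard. A self-contained illustration of that decision (the enum is redeclared here only for the sketch):

// illustrative redeclaration, not the repo's actual enum
enum RunJobResult {
  Success,
  RetryJobIfPossible,
  PermanentFailure,
}

function resultForReplyLength(expected: number, received: number): RunJobResult {
  if (received !== expected) {
    // possibly a 421 that was already handled (swarm refreshed): a later retry can succeed
    return RunJobResult.RetryJobIfPossible;
  }
  return RunJobResult.Success;
}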