Add some tests for onion path rebuild handling

Audric Ackermann 2021-05-19 17:04:21 +10:00
parent 5cab7908b4
commit f4c3c82e31
No known key found for this signature in database
GPG key ID: 999F434D76324AD4
6 changed files with 364 additions and 67 deletions

View file

@@ -127,7 +127,7 @@
"@types/blueimp-load-image": "^2.23.8",
"@types/buffer-crc32": "^0.2.0",
"@types/bytebuffer": "^5.0.41",
"@types/chai": "4.1.2",
"@types/chai": "^4.2.18",
"@types/chai-as-promised": "^7.1.2",
"@types/classnames": "2.2.3",
"@types/color": "^3.0.0",
@@ -165,7 +165,7 @@
"arraybuffer-loader": "1.0.3",
"asar": "0.14.0",
"bower": "1.8.2",
"chai": "4.1.2",
"chai": "4.3.4",
"chai-as-promised": "^7.1.1",
"chai-bytes": "^0.1.2",
"css-loader": "^3.6.0",

View file

@@ -1,4 +1,5 @@
import * as SnodePool from './snodePool';
import * as SNodeAPI from './SNodeAPI';
import * as Onions from './onions';
export { SnodePool, SNodeAPI };
export { SnodePool, SNodeAPI, Onions };
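Re-exporting Onions as a namespace here is what lets the new tests further down swap out module internals with sinon: stubbing a property on the namespace object replaces it for every caller that reads it off the module's exports. A minimal sketch of that test-side usage (import path abbreviated for illustration; see the test file below for the real one):

import * as sinon from 'sinon';
import { Onions } from '../session/snode_api'; // abbreviated path, for illustration only

const decodeStub = sinon.stub(Onions, 'decodeOnionResult').resolves({
  plaintext: JSON.stringify({ status: 200 }),
  ciphertextBuffer: new Uint8Array().buffer,
});
// ... exercise processOnionResponse, then assert on decodeStub.callCount ...
decodeStub.restore();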

View file

@@ -1,4 +1,4 @@
import { default as insecureNodeFetch, Response } from 'node-fetch';
import { default as insecureNodeFetch } from 'node-fetch';
import https from 'https';
import {
@@ -296,18 +296,18 @@ async function processAnyOtherErrorAtDestination(
}
async function processOnionRequestErrorOnPath(
response: Response,
httpStatusCode: number, // this is the one on the response object, not inside the json response
ciphertext: string,
guardNodeEd25519: string,
lsrpcEd25519Key?: string,
associatedWith?: string
) {
if (response.status !== 200) {
if (httpStatusCode !== 200) {
console.warn('errorONpath:', ciphertext);
}
process406Error(response.status);
await process421Error(response.status, ciphertext, associatedWith, lsrpcEd25519Key);
await processAnyOtherErrorOnPath(response.status, guardNodeEd25519, ciphertext, associatedWith);
process406Error(httpStatusCode);
await process421Error(httpStatusCode, ciphertext, associatedWith, lsrpcEd25519Key);
await processAnyOtherErrorOnPath(httpStatusCode, guardNodeEd25519, ciphertext, associatedWith);
}
function processAbortedRequest(abortSignal?: AbortSignal) {
@@ -320,11 +320,22 @@ function processAbortedRequest(abortSignal?: AbortSignal) {
const debug = false;
// Process a response as it arrives from `fetch`, handling
// http errors and attempting to decrypt the body with `sharedKey`
// tslint:disable-next-line: cyclomatic-complexity
async function processOnionResponse(
response: Response,
/**
* Only exported for testing purposes
*/
export async function decodeOnionResult(symmetricKey: ArrayBuffer, ciphertext: string) {
const ciphertextBuffer = fromBase64ToArrayBuffer(ciphertext);
const plaintextBuffer = await window.libloki.crypto.DecryptAESGCM(symmetricKey, ciphertextBuffer);
return { plaintext: new TextDecoder().decode(plaintextBuffer), ciphertextBuffer };
}
/**
* Only exported for testing purposes
*/
export async function processOnionResponse(
response: { text: () => Promise<string>; status: number },
symmetricKey: ArrayBuffer,
guardNode: Snode,
lsrpcEd25519Key?: string,
@@ -342,7 +353,7 @@ async function processOnionResponse(
}
await processOnionRequestErrorOnPath(
response,
response.status,
ciphertext,
guardNode.pubkey_ed25519,
lsrpcEd25519Key,
@@ -366,13 +377,11 @@ async function processOnionResponse(
// just try to get a json object from what is inside (for PN requests), if it fails, continue ()
}
try {
ciphertextBuffer = fromBase64ToArrayBuffer(ciphertext);
const plaintextBuffer = await window.libloki.crypto.DecryptAESGCM(
symmetricKey,
ciphertextBuffer
);
plaintext = new TextDecoder().decode(plaintextBuffer);
const decoded = await exports.decodeOnionResult(symmetricKey, ciphertext);
plaintext = decoded.plaintext;
ciphertextBuffer = decoded.ciphertextBuffer;
} catch (e) {
console.warn(e);
window?.log?.error('[path] lokiRpc::processingOnionResponse - decode error', e);
window?.log?.error(
'[path] lokiRpc::processingOnionResponse - symmetricKey',
@@ -402,7 +411,7 @@ async function processOnionResponse(
const status = jsonRes.status_code || jsonRes.status;
await processOnionRequestErrorAtDestination({
statusCode: status,
body: ciphertext,
body: plaintext,
destinationEd25519: lsrpcEd25519Key,
associatedWith,
});
@@ -451,28 +460,31 @@ function isSnodeResponse(arg: any): arg is SnodeResponse {
async function handle421InvalidSwarm(snodeEd25519: string, body: string, associatedWith?: string) {
// The snode isn't associated with the given public key anymore
// this does not make much sense to have a 421 without a publicKey set.
if (associatedWith) {
try {
const json = JSON.parse(body);
// The snode isn't associated with the given public key anymore
if (json.snodes?.length) {
// the snode gave us the new swarm. Save it for the next retry
window?.log?.warn('Wrong swarm, now looking at snodes', json.snodes);
return updateSwarmFor(associatedWith, json.snodes);
}
// remove this node from the swarm of this pubkey
return dropSnodeFromSwarmIfNeeded(associatedWith, snodeEd25519);
} catch (e) {
window?.log?.warn(
'Got error while parsing 421 result. Dropping this snode from the swarm of this pubkey',
e
);
// could not parse result. Consider that this snode as invalid
return dropSnodeFromSwarmIfNeeded(associatedWith, snodeEd25519);
}
if (!associatedWith) {
window?.log?.warn('Got a 421 without an associatedWith publickey');
return;
}
try {
const json = JSON.parse(body);
// The snode isn't associated with the given public key anymore
if (json.snodes?.length) {
// the snode gave us the new swarm. Save it for the next retry
window?.log?.warn('Wrong swarm, now looking at snodes', json.snodes);
return updateSwarmFor(associatedWith, json.snodes);
}
// remove this node from the swarm of this pubkey
return dropSnodeFromSwarmIfNeeded(associatedWith, snodeEd25519);
} catch (e) {
console.warn('dropSnodeFromSwarmIfNeeded', snodeEd25519);
window?.log?.warn(
'Got error while parsing 421 result. Dropping this snode from the swarm of this pubkey',
e
);
// could not parse result. Consider that this snode as invalid
return dropSnodeFromSwarmIfNeeded(associatedWith, snodeEd25519);
}
window?.log?.warn('Got a 421 without an associatedWith publickey');
}
/**
@@ -543,6 +555,8 @@ const sendOnionRequestHandlingSnodeEject = async ({
abortSignal?: AbortSignal;
associatedWith?: string;
}): Promise<SnodeResponse> => {
// this sendOnionRequest() call has to be the only one of its kind.
// If you need to call sendOnionRequest, go through sendOnionRequestHandlingSnodeEject instead, as it is the wrapper that handles path rebuilding and the known error codes
const { response, decodingSymmetricKey } = await sendOnionRequest({
nodePath,
destX25519Any,
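The hinge that makes these error paths testable is that processOnionResponse now calls the decryption step through exports.decodeOnionResult rather than through the local binding, so a sinon stub installed on the module's export object is what actually runs at test time. A minimal sketch of the pattern, assuming CommonJS module output (names come from the diff above, bodies elided):

// onions.ts (sketch)
export async function decodeOnionResult(symmetricKey: ArrayBuffer, ciphertext: string) {
  // the real function base64-decodes the ciphertext and runs DecryptAESGCM; elided here
  return { plaintext: '', ciphertextBuffer: new ArrayBuffer(0) };
}

export async function processOnionResponse(symmetricKey: ArrayBuffer, ciphertext: string /* , ... */) {
  // reading the function off `exports` (the CommonJS export object) instead of using
  // the local binding means a stub placed on the module object intercepts this call
  const decoded = await exports.decodeOnionResult(symmetricKey, ciphertext);
  return decoded.plaintext;
}

Had the call stayed as a direct decodeOnionResult(...), the stub the tests install on the Onions namespace would never be hit, because the local binding bypasses the export object.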

View file

@@ -156,9 +156,6 @@ export async function getRandomSnode(excludingEd25519Snode?: Array<string>): Pro
// used for tests
throw new Error('SeedNodeError');
}
console.warn('randomSnodePool', randomSnodePool.length);
console.warn('excludingEd25519Snode', excludingEd25519Snode.length);
console.warn('snodePoolExcluding', snodePoolExcluding.length);
return _.sample(snodePoolExcluding) as Snode;
}

View file

@@ -0,0 +1,285 @@
// tslint:disable: no-implicit-dependencies max-func-body-length no-unused-expression
import chai from 'chai';
import * as sinon from 'sinon';
import _ from 'lodash';
import { describe } from 'mocha';
import { TestUtils } from '../../../test-utils';
import * as SNodeAPI from '../../../../../ts/session/snode_api/';
import chaiAsPromised from 'chai-as-promised';
import { OnionPaths } from '../../../../session/onions/';
import { processOnionResponse } from '../../../../session/snode_api/onions';
import AbortController from 'abort-controller';
import * as Data from '../../../../../ts/data/data';
import { Snode } from '../../../../session/snode_api/snodePool';
import { fromArrayBufferToBase64 } from '../../../../session/utils/String';
import { Onions } from '../../../../../ts/session/snode_api/';
chai.use(chaiAsPromised as any);
chai.should();
const { expect } = chai;
const getFakeResponse = (statusCode?: number, body?: string) => {
return {
status: statusCode || 0,
text: async () => body || '',
};
};
// tslint:disable-next-line: max-func-body-length
describe('OnionPathsErrors', () => {
// Initialize new stubbed cache
const sandbox = sinon.createSandbox();
// tslint:disable-next-line: one-variable-per-declaration
let guardPubkeys: Array<string>,
otherNodesPubkeys: Array<string>,
guard1ed: string,
guard2ed: string,
guard3ed: string,
guardNodesArray: Array<Snode>,
guardSnode1: Snode,
otherNodesArray: Array<Snode>,
fakeSnodePool: Array<Snode>,
associatedWith: string,
fakeSwarmForAssocatedWith: Array<string>;
const fakeIP = '8.8.8.8';
let fakePortCurrent = 20000;
beforeEach(() => {
guardPubkeys = TestUtils.generateFakePubKeys(3).map(n => n.key);
otherNodesPubkeys = TestUtils.generateFakePubKeys(9).map(n => n.key);
guard1ed = guardPubkeys[0];
guard2ed = guardPubkeys[1];
guard3ed = guardPubkeys[2];
guardNodesArray = guardPubkeys.map(ed25519 => {
fakePortCurrent++;
return {
ip: fakeIP,
port: fakePortCurrent,
pubkey_ed25519: ed25519,
pubkey_x25519: ed25519,
version: '',
};
});
guardSnode1 = guardNodesArray[0];
otherNodesArray = otherNodesPubkeys.map(ed25519 => {
fakePortCurrent++;
return {
ip: fakeIP,
port: fakePortCurrent,
pubkey_ed25519: ed25519,
pubkey_x25519: ed25519,
version: '',
};
});
fakeSnodePool = [...guardNodesArray, ...otherNodesArray];
associatedWith = TestUtils.generateFakePubKey().key;
fakeSwarmForAssocatedWith = otherNodesPubkeys.slice(0, 6);
// Utils Stubs
sandbox.stub(OnionPaths, 'selectGuardNodes').resolves(guardNodesArray);
sandbox.stub(SNodeAPI.SNodeAPI, 'getSnodePoolFromSnode').resolves(guardNodesArray);
TestUtils.stubData('getGuardNodes').resolves([guard1ed, guard2ed, guard3ed]);
TestUtils.stubWindow('getSeedNodeList', () => ['seednode1']);
sandbox.stub(SNodeAPI.SnodePool, 'refreshRandomPoolDetail').resolves(fakeSnodePool);
OnionPaths.clearTestOnionPath();
});
afterEach(() => {
TestUtils.restoreStubs();
sandbox.restore();
});
describe('processOnionResponse', () => {
it('throws a non-retryable error when the request is aborted', async () => {
const abortController = new AbortController();
abortController.abort();
try {
await processOnionResponse(
getFakeResponse(),
new Uint8Array(),
guardSnode1,
undefined,
abortController.signal
);
throw new Error('Error expected');
} catch (e) {
expect(e.message).to.equal('Request got aborted');
// this makes sure that this call would not be retried
expect(e.name).to.equal('AbortError');
}
});
it('throws a non-retryable error when we get a 406 status code', async () => {
try {
await processOnionResponse(getFakeResponse(406), new Uint8Array(), guardSnode1, undefined);
throw new Error('Error expected');
} catch (e) {
expect(e.message).to.equal('You clock is out of sync with the network. Check your clock.');
// this makes sure that this call would not be retried
expect(e.name).to.equal('AbortError');
}
});
describe('processOnionResponse - 421', () => {
it('throws a retryable error if we get a 421 status code without a new swarm', async () => {
sandbox.stub(Data, 'getSwarmNodesForPubkey').resolves(fakeSwarmForAssocatedWith);
const updateSwarmSpy = sandbox.stub(Data, 'updateSwarmNodesForPubkey').resolves();
const targetNode = otherNodesPubkeys[0];
try {
await processOnionResponse(
getFakeResponse(421),
new Uint8Array(),
guardSnode1,
targetNode,
undefined,
associatedWith
);
throw new Error('Error expected');
} catch (e) {
expect(e.message).to.equal('Bad Path handled. Retry this request. Status: 421');
}
expect(updateSwarmSpy.callCount).to.eq(1);
// if we don't get a new swarm in the returned json, we drop the target node considering it is a bad snode
expect(updateSwarmSpy.args[0][1]).to.deep.eq(
fakeSwarmForAssocatedWith.filter(m => m !== targetNode)
);
});
it('throws a retryable error when we get a 421 status code with a new swarm', async () => {
sandbox.stub(Data, 'getSwarmNodesForPubkey').resolves(fakeSwarmForAssocatedWith);
const updateSwarmSpy = sandbox.stub(Data, 'updateSwarmNodesForPubkey').resolves();
const targetNode = otherNodesPubkeys[0];
const resultExpected: Array<Snode> = [
otherNodesArray[4],
otherNodesArray[5],
otherNodesArray[6],
];
try {
await processOnionResponse(
getFakeResponse(421, JSON.stringify({ snodes: resultExpected })),
new Uint8Array(),
guardSnode1,
targetNode,
undefined,
associatedWith
);
throw new Error('Error expected');
} catch (e) {
expect(e.message).to.equal('Bad Path handled. Retry this request. Status: 421');
}
expect(updateSwarmSpy.callCount).to.eq(1);
// we got 3 snodes in the result, so this is our new swarm for this associatedWith pubkey
expect(updateSwarmSpy.args[0][1]).to.deep.eq(resultExpected.map(m => m.pubkey_ed25519));
});
it('throws a retryable error when we get a 421 status code with an invalid json body', async () => {
sandbox.stub(Data, 'getSwarmNodesForPubkey').resolves(fakeSwarmForAssocatedWith);
const updateSwarmSpy = sandbox.stub(Data, 'updateSwarmNodesForPubkey').resolves();
const targetNode = otherNodesPubkeys[0];
try {
await processOnionResponse(
getFakeResponse(421, 'THIS IS SOME INVALID JSON'),
new Uint8Array(),
guardSnode1,
targetNode,
undefined,
associatedWith
);
throw new Error('Error expected');
} catch (e) {
expect(e.message).to.equal('Bad Path handled. Retry this request. Status: 421');
}
expect(updateSwarmSpy.callCount).to.eq(1);
// the 421 body could not be parsed, so we only drop the target node from the swarm of this pubkey
expect(updateSwarmSpy.args[0][1]).to.deep.eq(
fakeSwarmForAssocatedWith.filter(m => m !== targetNode)
);
});
it('throws a retryable error when we get a 421 status code inside the content of the json, without a new swarm', async () => {
sandbox.stub(Data, 'getSwarmNodesForPubkey').resolves(fakeSwarmForAssocatedWith);
const updateSwarmSpy = sandbox.stub(Data, 'updateSwarmNodesForPubkey').resolves();
const targetNode = otherNodesPubkeys[0];
const json = JSON.stringify({ status: 421 });
TestUtils.stubWindow('libloki', {
crypto: {
DecryptAESGCM: async (s: any, e: string) => e,
} as any,
});
sandbox
.stub(Onions, 'decodeOnionResult')
.resolves({ plaintext: json, ciphertextBuffer: new Uint8Array() });
try {
await processOnionResponse(
getFakeResponse(200, fromArrayBufferToBase64(Buffer.from(json))),
new Uint8Array(),
guardSnode1,
targetNode,
undefined,
associatedWith
);
throw new Error('Error expected');
} catch (e) {
expect(e.message).to.equal('Bad Path handled. Retry this request. Status: 421');
}
expect(updateSwarmSpy.callCount).to.eq(1);
// 421 without swarm included means drop the target node only
expect(updateSwarmSpy.args[0][1]).to.deep.eq(
fakeSwarmForAssocatedWith.filter(m => m !== targetNode)
);
});
it('throws a retryable error when we get a 421 status code inside the content of the json, with a new swarm', async () => {
sandbox.stub(Data, 'getSwarmNodesForPubkey').resolves(fakeSwarmForAssocatedWith);
const updateSwarmSpy = sandbox.stub(Data, 'updateSwarmNodesForPubkey').resolves();
const targetNode = otherNodesPubkeys[0];
const resultExpected: Array<Snode> = [
otherNodesArray[4],
otherNodesArray[5],
otherNodesArray[6],
];
const json = JSON.stringify({ status: 421, snodes: resultExpected });
TestUtils.stubWindow('libloki', {
crypto: {
DecryptAESGCM: async (s: any, e: string) => e,
} as any,
});
sandbox
.stub(Onions, 'decodeOnionResult')
.resolves({ plaintext: json, ciphertextBuffer: new Uint8Array() });
try {
await processOnionResponse(
getFakeResponse(200, json),
new Uint8Array(),
guardSnode1,
targetNode,
undefined,
associatedWith
);
throw new Error('Error expected');
} catch (e) {
expect(e.message).to.equal('Bad Path handled. Retry this request. Status: 421');
}
expect(updateSwarmSpy.callCount).to.eq(1);
// 421 with a swarm included means the cached swarm is replaced with it
expect(updateSwarmSpy.args[0][1]).to.deep.eq(resultExpected.map(m => m.pubkey_ed25519));
});
});
});
});
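Read together, the 421 cases pin down the swarm-update policy exercised above: a parseable body carrying a snodes list replaces the cached swarm for the associated pubkey, while anything else (no snodes, or unparseable JSON) drops only the snode that reported the 421, and in every case the caller still receives the retryable 'Bad Path handled' error. A condensed sketch of that policy, with a plain Map standing in for the real swarm storage (hypothetical helper, not the repository function):

// sketch only: swarmCache stands in for the Data-layer swarm storage used by the real code
function handle421Sketch(
  snodeEd25519: string,
  body: string,
  associatedWith: string,
  swarmCache: Map<string, Array<string>>
) {
  try {
    const json = JSON.parse(body);
    if (json.snodes?.length) {
      // a new swarm was supplied: replace the cached one wholesale
      swarmCache.set(
        associatedWith,
        json.snodes.map((s: { pubkey_ed25519: string }) => s.pubkey_ed25519)
      );
      return;
    }
  } catch (e) {
    // unparseable body: fall through and treat the reporting snode as bad
  }
  // no usable swarm in the body: drop only the snode that answered with the 421
  const current = swarmCache.get(associatedWith) || [];
  swarmCache.set(associatedWith, current.filter(ed => ed !== snodeEd25519));
}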

View file

@@ -399,10 +399,10 @@
resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.2.11.tgz#d3614d6c5f500142358e6ed24e1bf16657536c50"
integrity sha512-t7uW6eFafjO+qJ3BIV2gGUyZs27egcNRkUdalkud+Qa3+kg//f129iuOFivHDXQ+vnU3fDXuwgv0cqMCbcE8sw==
"@types/chai@4.1.2":
version "4.1.2"
resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.1.2.tgz#f1af664769cfb50af805431c407425ed619daa21"
integrity sha512-D8uQwKYUw2KESkorZ27ykzXgvkDJYXVEihGklgfp5I4HUP8D6IxtcdLTMB1emjQiWzV7WZ5ihm1cxIzVwjoleQ==
"@types/chai@^4.2.18":
version "4.2.18"
resolved "https://registry.yarnpkg.com/@types/chai/-/chai-4.2.18.tgz#0c8e298dbff8205e2266606c1ea5fbdba29b46e4"
integrity sha512-rS27+EkB/RE1Iz3u0XtVL5q36MGDWbgYe7zWiodyKNUnthxY0rukK5V36eiUCtCisB7NN8zKYH6DO2M37qxFEQ==
"@types/classnames@2.2.3":
version "2.2.3"
@@ -1283,7 +1283,7 @@ assert@^1.1.1:
object-assign "^4.1.1"
util "0.10.3"
assertion-error@^1.0.1:
assertion-error@^1.1.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-1.1.0.tgz#e60b6b0e8f301bd97e5375215bda406c85118c0b"
integrity sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==
@@ -2118,17 +2118,17 @@ chai-bytes@^0.1.2:
resolved "https://registry.yarnpkg.com/chai-bytes/-/chai-bytes-0.1.2.tgz#c297e81d47eb3106af0676ded5bb5e0c9f981db3"
integrity sha512-0ol6oJS0y1ozj6AZK8n1pyv1/G+l44nqUJygAkK1UrYl+IOGie5vcrEdrAlwmLYGIA9NVvtHWosPYwWWIXf/XA==
chai@4.1.2:
version "4.1.2"
resolved "https://registry.yarnpkg.com/chai/-/chai-4.1.2.tgz#0f64584ba642f0f2ace2806279f4f06ca23ad73c"
integrity sha1-D2RYS6ZC8PKs4oBiefTwbKI61zw=
chai@4.3.4:
version "4.3.4"
resolved "https://registry.yarnpkg.com/chai/-/chai-4.3.4.tgz#b55e655b31e1eac7099be4c08c21964fce2e6c49"
integrity sha512-yS5H68VYOCtN1cjfwumDSuzn/9c+yza4f3reKXlE5rUg7SFcCEy90gJvydNgOYtblyf4Zi6jIWRnXOgErta0KA==
dependencies:
assertion-error "^1.0.1"
check-error "^1.0.1"
deep-eql "^3.0.0"
assertion-error "^1.1.0"
check-error "^1.0.2"
deep-eql "^3.0.1"
get-func-name "^2.0.0"
pathval "^1.0.0"
type-detect "^4.0.0"
pathval "^1.1.1"
type-detect "^4.0.5"
chainsaw@~0.1.0:
version "0.1.0"
@@ -2198,7 +2198,7 @@ chardet@^0.4.0:
resolved "https://registry.yarnpkg.com/chardet/-/chardet-0.4.2.tgz#b5473b33dc97c424e5d98dc87d55d4d8a29c8bf2"
integrity sha1-tUc7M9yXxCTl2Y3IfVXU2KKci/I=
check-error@^1.0.1, check-error@^1.0.2:
check-error@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/check-error/-/check-error-1.0.2.tgz#574d312edd88bb5dd8912e9286dd6c0aed4aac82"
integrity sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=
@@ -3194,7 +3194,7 @@ deep-diff@^0.3.5:
resolved "https://registry.yarnpkg.com/deep-diff/-/deep-diff-0.3.8.tgz#c01de63efb0eec9798801d40c7e0dae25b582c84"
integrity sha1-wB3mPvsO7JeYgB1Ax+Da4ltYLIQ=
deep-eql@^3.0.0:
deep-eql@^3.0.1:
version "3.0.1"
resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-3.0.1.tgz#dfc9404400ad1c8fe023e7da1df1c147c4b444df"
integrity sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==
@@ -8315,10 +8315,10 @@ path-type@^4.0.0:
resolved "https://registry.yarnpkg.com/path-type/-/path-type-4.0.0.tgz#84ed01c0a7ba380afe09d90a8c180dcd9d03043b"
integrity sha512-gDKb8aZMDeD/tZWs9P6+q0J9Mwkdl6xMV8TjnGP3qJVJ06bdMgkbBlLU8IdfOsIsFz2BW1rNVT3XuNEl8zPAvw==
pathval@^1.0.0:
version "1.1.0"
resolved "https://registry.yarnpkg.com/pathval/-/pathval-1.1.0.tgz#b942e6d4bde653005ef6b71361def8727d0645e0"
integrity sha1-uULm1L3mUwBe9rcTYd74cn0GReA=
pathval@^1.1.1:
version "1.1.1"
resolved "https://registry.yarnpkg.com/pathval/-/pathval-1.1.1.tgz#8534e77a77ce7ac5a2512ea21e0fdb8fcf6c3d8d"
integrity sha512-Dp6zGqpTdETdR63lehJYPeIOqpiNBNtc7BpWSLrOje7UaIsE5aY92r/AunQA7rsXvet3lrJ3JnZX29UPTKXyKQ==
pbkdf2@^3.0.3:
version "3.0.17"
@@ -11599,7 +11599,7 @@ type-check@~0.3.2:
dependencies:
prelude-ls "~1.1.2"
type-detect@4.0.8, type-detect@^4.0.0, type-detect@^4.0.8:
type-detect@4.0.8, type-detect@^4.0.0, type-detect@^4.0.5, type-detect@^4.0.8:
version "4.0.8"
resolved "https://registry.yarnpkg.com/type-detect/-/type-detect-4.0.8.tgz#7646fb5f18871cfbb7749e69bd39a6388eb7450c"
integrity sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==