add some [perf] duration measurement

Audric Ackermann 2022-04-14 13:21:09 +10:00
parent af9d2fdfa1
commit 6001da5ac8
No known key found for this signature in database
GPG Key ID: 999F434D76324AD4
8 changed files with 29 additions and 34 deletions

View File

@@ -80,7 +80,6 @@ module.exports = grunt => {
grunt.registerTask('default', [
'exec:build-protobuf',
'exec:transpile',
'concat',
'sass',
'date',
'getCommitHash',

View File

@@ -1748,7 +1748,7 @@ const trotthledAllConversationsDispatch = _.throttle(() => {
window.inboxStore?.dispatch(conversationsChanged([...updatesToDispatch.values()]));
updatesToDispatch.clear();
}, 500);
}, 2000);
const updatesToDispatch: Map<string, ReduxConversationType> = new Map();
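The hunk above widens the throttle window of the batched conversation dispatch (the old and new sides show 500ms and 2000ms). A minimal sketch of that batching pattern, assuming lodash's `_.throttle` and a generic `dispatch` callback; the names and types below are illustrative, not the project's own:

```ts
import _ from 'lodash';

type ConversationUpdate = { id: string; [key: string]: unknown };

// Updates accumulate here between flushes; the Map keeps only the
// latest update per conversation id.
const pendingUpdates: Map<string, ConversationUpdate> = new Map();

// Flush at most once every 2 seconds, mirroring the new throttle window.
const flushUpdates = _.throttle((dispatch: (updates: Array<ConversationUpdate>) => void) => {
  if (pendingUpdates.size === 0) {
    return;
  }
  dispatch([...pendingUpdates.values()]);
  pendingUpdates.clear();
}, 2000);

export function queueConversationUpdate(
  update: ConversationUpdate,
  dispatch: (updates: Array<ConversationUpdate>) => void
) {
  pendingUpdates.set(update.id, update);
  flushUpdates(dispatch);
}
```

Keying the Map by conversation id means repeated updates to the same conversation collapse into a single entry before the next flush.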

View File

@@ -109,7 +109,7 @@ async function decryptForClosedGroup(envelope: EnvelopePlus, ciphertext: ArrayBu
*
*/
window?.log?.warn('decryptWithSessionProtocol for medium group message throw:', e);
window?.log?.warn('decryptWithSessionProtocol for medium group message throw:', e.message);
const groupPubKey = PubKey.cast(envelope.source);
// IMPORTANT do not remove the message from the cache just yet.

View File

@@ -20,24 +20,9 @@ const queue = new Queue({
interval: 500,
});
queue.on('dequeue', () => {
// window.log.info('[profile-update] queue is dequeuing');
});
queue.on('resolve', () => {
// window.log.info('[profile-update] task resolved');
});
queue.on('reject', error => {
window.log.warn('[profile-update] task profile image update failed with', error);
});
queue.on('start', () => {
window.log.info('[profile-update] queue is starting');
});
queue.on('stop', () => {
window.log.info('[profile-update] queue is stopping');
});
queue.on('end', () => {
window.log.info('[profile-update] queue is ending');
});
export async function appendFetchAvatarAndProfileJob(
conversation: ConversationModel,
@@ -51,16 +36,13 @@ export async function appendFetchAvatarAndProfileJob(
const oneAtaTimeStr = `appendFetchAvatarAndProfileJob:${conversation.id}`;
if (hasAlreadyOneAtaTimeMatching(oneAtaTimeStr)) {
window.log.info(
'[profile-update] not adding another task of "appendFetchAvatarAndProfileJob" as there is already one scheduled for the conversation: ',
conversation.id
);
// window.log.debug(
// '[profile-update] not adding another task of "appendFetchAvatarAndProfileJob" as there is already one scheduled for the conversation: ',
// conversation.id
// );
return;
}
window.log.info(
'[profile-update] "appendFetchAvatarAndProfileJob" as there is already one scheduled for the conversation: ',
conversation.id
);
window.log.info(`[profile-update] queuing fetching avatar for ${conversation.id}`);
const task = allowOnlyOneAtATime(oneAtaTimeStr, async () => {
return createOrUpdateProfile(conversation, profile, profileKey);
});
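`appendFetchAvatarAndProfileJob` is guarded so that at most one avatar/profile fetch per conversation runs at a time. A rough sketch of that kind of guard, using an in-memory map of pending promises; `hasPendingJob` and `runExclusive` are hypothetical stand-ins, not the project's `allowOnlyOneAtATime` helpers:

```ts
// Keyed by an arbitrary lock string, e.g. `appendFetchAvatarAndProfileJob:${conversationId}`.
const inFlight: Map<string, Promise<unknown>> = new Map();

export function hasPendingJob(key: string): boolean {
  return inFlight.has(key);
}

export async function runExclusive<T>(key: string, job: () => Promise<T>): Promise<T> {
  const existing = inFlight.get(key);
  if (existing) {
    // A job with the same key is already running: reuse its result.
    return existing as Promise<T>;
  }
  const promise = job().finally(() => {
    inFlight.delete(key);
  });
  inFlight.set(key, promise);
  return promise;
}
```

A caller that hits an existing entry simply awaits the promise already in flight, which is why scheduling a second job for the same conversation is unnecessary.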
@@ -98,6 +80,10 @@ async function createOrUpdateProfile(
// Retain old values unless changed:
const newProfile = conversation.get('profile') || {};
let changes = false;
if (newProfile.displayName !== profile.displayName) {
changes = true;
}
newProfile.displayName = profile.displayName;
if (profile.profilePicture && profileKey) {
@@ -106,6 +92,7 @@ async function createOrUpdateProfile(
if (needsUpdate) {
try {
window.log.debug(`[profile-update] starting downloading task for ${conversation.id}`);
const downloaded = await downloadAttachment({
url: profile.profilePicture,
isRaw: true,
@@ -137,6 +124,7 @@ async function createOrUpdateProfile(
}
}
newProfile.avatar = path;
changes = true;
} catch (e) {
window.log.warn(
`[profile-update] Failed to download attachment at ${profile.profilePicture}. Maybe it expired? ${e.message}`
@@ -145,6 +133,9 @@ async function createOrUpdateProfile(
}
}
} else if (profileKey) {
if (newProfile.avatar !== null) {
changes = true;
}
newProfile.avatar = null;
}
@@ -153,5 +144,7 @@ async function createOrUpdateProfile(
ConversationTypeEnum.PRIVATE
);
await conv.setLokiProfile(newProfile);
await conv.commit();
if (changes) {
await conv.commit();
}
}
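The net effect of the changes in this file is that `createOrUpdateProfile` now tracks whether anything actually changed and only calls `conv.commit()` when it did. A condensed sketch of that pattern; the profile shape and `ConversationLike` interface are assumptions, not the project's models:

```ts
interface LokiProfile {
  displayName?: string;
  avatar?: string | null;
}

interface ConversationLike {
  getProfile(): LokiProfile;
  setProfile(profile: LokiProfile): Promise<void>;
  commit(): Promise<void>;
}

export async function updateProfileIfChanged(
  conv: ConversationLike,
  incoming: LokiProfile
): Promise<void> {
  const current = conv.getProfile();

  // Retain old values unless the incoming profile provides new ones.
  const next: LokiProfile = {
    displayName: incoming.displayName ?? current.displayName,
    avatar: incoming.avatar !== undefined ? incoming.avatar : current.avatar,
  };

  const changes =
    next.displayName !== current.displayName || next.avatar !== current.avatar;

  await conv.setProfile(next);

  // Skip the write when nothing actually changed.
  if (changes) {
    await conv.commit();
  }
}
```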

View File

@@ -48,10 +48,11 @@ export const parseMessages = async (
rawMessages: Array<Record<string, any>>
): Promise<Array<OpenGroupMessageV2>> => {
if (!rawMessages || rawMessages.length === 0) {
window?.log?.info('no new messages');
return [];
}
const startParse = Date.now();
const opengroupMessagesSignatureUnchecked = _.compact(
rawMessages.map(rawMessage => {
try {
@@ -73,6 +74,7 @@ }
}
})
);
window.log.debug(`[perf] parseMessage took ${Date.now() - startParse}ms`);
const sentToWorker = opengroupMessagesSignatureUnchecked.map(m => {
return {
@@ -81,14 +83,14 @@
base64EncodedData: m.opengroupv2Message.base64EncodedData,
};
});
const startVerify = Date.now();
const now = Date.now();
// this filters out any invalid signature and returns the array of valid encoded data
const signatureValidEncodedData = (await callUtilsWorker(
'verifyAllSignatures',
sentToWorker
)) as Array<string>;
window.log.info(`[perf] verifyAllSignatures took ${Date.now() - now}ms.`);
window.log.info(`[perf] verifyAllSignatures took ${Date.now() - startVerify}ms.`);
const parsedMessages = opengroupMessagesSignatureUnchecked
.filter(m => signatureValidEncodedData.includes(m.opengroupv2Message.base64EncodedData))
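The timing added here follows the same shape as in the other files: record `Date.now()` before a step, run it, and log a `[perf]` line with the elapsed milliseconds. A small generic wrapper in that spirit; `logDuration` and its `Logger` type are hypothetical, not part of this commit:

```ts
type Logger = { debug: (msg: string) => void };

// Runs an async step and logs how long it took, tagged with [perf].
export async function logDuration<T>(
  logger: Logger,
  label: string,
  step: () => Promise<T>
): Promise<T> {
  const start = Date.now();
  try {
    return await step();
  } finally {
    logger.debug(`[perf] ${label} took ${Date.now() - start}ms`);
  }
}

// Example usage, mirroring the verifyAllSignatures measurement above:
// const valid = await logDuration(window.log, 'verifyAllSignatures', () =>
//   callUtilsWorker('verifyAllSignatures', sentToWorker)
// );
```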

View File

@@ -26,8 +26,9 @@ export const filterDuplicatesFromDbAndIncoming = async (
);
window.log.debug(
`filterDuplicatesFromDbAndIncoming of ${newMessages.length} messages took ${Date.now() -
start}ms.`
`[perf] filterDuplicatesFromDbAndIncoming took ${Date.now() - start}ms for ${
newMessages.length
} messages`
);
const opengroupMessagesFiltered = filteredInDb?.map(f => {
return newMessages.find(m => m.sender === f.sender && m.sentTimestamp === f.serverTimestamp);
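The visible tail of this hunk pairs the rows returned by the duplicate check back to the incoming messages by sender and timestamp. A self-contained sketch of just that pairing step, with assumed row shapes:

```ts
interface IncomingMessage {
  sender: string;
  sentTimestamp: number;
}

interface DbRow {
  sender: string;
  serverTimestamp: number;
}

// Match rows returned by the duplicate check back to the original incoming
// messages, pairing on sender and timestamp.
export function matchRowsToIncoming(
  rows: Array<DbRow>,
  incoming: Array<IncomingMessage>
): Array<IncomingMessage | undefined> {
  return rows.map(row =>
    incoming.find(m => m.sender === row.sender && m.sentTimestamp === row.serverTimestamp)
  );
}
```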

View File

@@ -59,7 +59,7 @@ export function getUnpaddedAttachment(
data: ArrayBuffer,
unpaddedExpectedSize: number
): ArrayBuffer | null {
window?.log?.info('Removing attachment padding...');
// window?.log?.debug('Removing attachment padding...');
// to have a padding we must have a strictly longer length expected
if (data.byteLength <= unpaddedExpectedSize) {
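For context, `getUnpaddedAttachment` strips the padding appended to attachments: a padded buffer must be strictly longer than the expected unpadded size, and the payload is the leading `unpaddedExpectedSize` bytes. A simplified sketch of that logic, not a verbatim copy of the function shown:

```ts
export function stripAttachmentPadding(
  data: ArrayBuffer,
  unpaddedExpectedSize: number
): ArrayBuffer | null {
  // A padded attachment must be strictly longer than its expected size.
  if (data.byteLength <= unpaddedExpectedSize) {
    return null;
  }
  // The real content is the leading bytes; everything after is padding.
  return data.slice(0, unpaddedExpectedSize);
}
```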

View File

@@ -255,7 +255,7 @@ export async function autoScale<T extends { contentType: string; blob: Blob }>(
if (readAndResizedBlob.size > maxSize) {
throw new Error('Cannot add this attachment even after trying to scale it down.');
}
window.log.debug(`autoscale took ${Date.now() - start}ms `);
window.log.debug(`[perf] autoscale took ${Date.now() - start}ms `);
return {
contentType: attachment.contentType,