Move data.js to data.ts

Audric Ackermann 2021-02-15 15:16:38 +11:00
parent 7b81c4213a
commit 8ea9f02cec
63 changed files with 1259 additions and 4198 deletions


@@ -25,3 +25,9 @@ ReadSyncs
SyncMessage
sendSyncMessage needs to be rewritten
sendSyncMessageOnly to fix
indexedDB
initializeAttachmentMetadata=>
schemaVersion for messages to put as what needs to be set
run_migration


@@ -26,51 +26,21 @@ module.exports = {
initialize,
close,
removeDB,
removeIndexedDBFiles,
setSQLPassword,
getPasswordHash,
savePasswordHash,
removePasswordHash,
createOrUpdateIdentityKey,
getIdentityKeyById,
bulkAddIdentityKeys,
removeIdentityKeyById,
removeAllIdentityKeys,
getAllIdentityKeys,
createOrUpdatePreKey,
getPreKeyById,
getPreKeyByRecipient,
bulkAddPreKeys,
removePreKeyById,
removeAllPreKeys,
getAllPreKeys,
createOrUpdateSignedPreKey,
getSignedPreKeyById,
getAllSignedPreKeys,
bulkAddSignedPreKeys,
removeSignedPreKeyById,
removeAllSignedPreKeys,
createOrUpdateContactPreKey,
getContactPreKeyById,
getContactPreKeyByIdentityKey,
getContactPreKeys,
getAllContactPreKeys,
bulkAddContactPreKeys,
removeContactPreKeyByIdentityKey,
removeAllContactPreKeys,
createOrUpdateContactSignedPreKey,
getContactSignedPreKeyById,
getContactSignedPreKeyByIdentityKey,
getContactSignedPreKeys,
bulkAddContactSignedPreKeys,
removeContactSignedPreKeyByIdentityKey,
removeAllContactSignedPreKeys,
removeAllPreKeys,
removeAllSessions,
createOrUpdateItem,
getItemById,
@@ -79,15 +49,6 @@ module.exports = {
removeItemById,
removeAllItems,
createOrUpdateSession,
getSessionById,
getSessionsByNumber,
bulkAddSessions,
removeSessionById,
removeSessionsByNumber,
removeAllSessions,
getAllSessions,
getSwarmNodesForPubkey,
updateSwarmNodesForPubkey,
getGuardNodes,
@@ -108,7 +69,6 @@ module.exports = {
getAllConversationIds,
getAllGroupsInvolvingId,
removeAllConversations,
removeAllPrivateConversations,
searchConversations,
searchMessages,
@@ -126,7 +86,6 @@ module.exports = {
getUnreadByConversation,
getUnreadCountByConversation,
getMessageBySender,
getMessagesBySender,
getMessageIdsFromServerIds,
getMessageById,
getAllMessages,
@@ -158,16 +117,12 @@ module.exports = {
removeAllAttachmentDownloadJobs,
removeAll,
removeAllConfiguration,
getMessagesNeedingUpgrade,
getMessagesWithVisualMediaAttachments,
getMessagesWithFileAttachments,
removeKnownAttachments,
removeAllClosedGroupRatchets,
getAllEncryptionKeyPairsForGroup,
getLatestClosedGroupEncryptionKeyPair,
addClosedGroupEncryptionKeyPair,
@@ -906,12 +861,6 @@ async function updateToLokiSchemaVersion3(currentVersion, instance) {
const SENDER_KEYS_TABLE = 'senderKeys';
async function removeAllClosedGroupRatchets(groupId) {
await db.run(`DELETE FROM ${SENDER_KEYS_TABLE} WHERE groupId = $groupId;`, {
$groupId: groupId,
});
}
async function updateToLokiSchemaVersion4(currentVersion, instance) {
if (currentVersion >= 4) {
return;
@@ -1181,11 +1130,8 @@ async function createLokiSchemaTable(instance) {
let db;
let filePath;
let indexedDBPath;
function _initializePaths(configDir) {
indexedDBPath = path.join(configDir, 'IndexedDB');
const dbDir = path.join(configDir, 'sql');
mkdirp.sync(dbDir);
@@ -1297,18 +1243,6 @@ async function removeDB(configDir = null) {
rimraf.sync(filePath);
}
async function removeIndexedDBFiles() {
if (!indexedDBPath) {
throw new Error(
'removeIndexedDBFiles: Need to initialize and set indexedDBPath first!'
);
}
const pattern = path.join(indexedDBPath, '*.leveldb');
rimraf.sync(pattern);
indexedDBPath = null;
}
// Password hash
const PASS_HASH_ID = 'passHash';
async function getPasswordHash() {
@@ -1328,18 +1262,9 @@ async function removePasswordHash() {
}
const IDENTITY_KEYS_TABLE = 'identityKeys';
async function createOrUpdateIdentityKey(data) {
return createOrUpdate(IDENTITY_KEYS_TABLE, data);
}
async function getIdentityKeyById(id, instance) {
return getById(IDENTITY_KEYS_TABLE, id, instance);
}
async function bulkAddIdentityKeys(array) {
return bulkAdd(IDENTITY_KEYS_TABLE, array);
}
async function removeIdentityKeyById(id) {
return removeById(IDENTITY_KEYS_TABLE, id);
}
async function removeAllIdentityKeys() {
return removeAllFromTable(IDENTITY_KEYS_TABLE);
}
@@ -1348,203 +1273,24 @@ async function getAllIdentityKeys() {
}
const PRE_KEYS_TABLE = 'preKeys';
async function createOrUpdatePreKey(data) {
const { id, recipient } = data;
if (!id) {
throw new Error('createOrUpdate: Provided data did not have a truthy id');
}
await db.run(
`INSERT OR REPLACE INTO ${PRE_KEYS_TABLE} (
id,
recipient,
json
) values (
$id,
$recipient,
$json
)`,
{
$id: id,
$recipient: recipient || '',
$json: objectToJSON(data),
}
);
}
async function getPreKeyById(id) {
return getById(PRE_KEYS_TABLE, id);
}
async function getPreKeyByRecipient(recipient) {
const row = await db.get(
`SELECT * FROM ${PRE_KEYS_TABLE} WHERE recipient = $recipient;`,
{
$recipient: recipient,
}
);
if (!row) {
return null;
}
return jsonToObject(row.json);
}
async function bulkAddPreKeys(array) {
return bulkAdd(PRE_KEYS_TABLE, array);
}
async function removePreKeyById(id) {
return removeById(PRE_KEYS_TABLE, id);
}
async function removeAllPreKeys() {
return removeAllFromTable(PRE_KEYS_TABLE);
}
async function getAllPreKeys() {
return getAllFromTable(PRE_KEYS_TABLE);
}
const CONTACT_PRE_KEYS_TABLE = 'contactPreKeys';
async function createOrUpdateContactPreKey(data) {
const { keyId, identityKeyString } = data;
await db.run(
`INSERT OR REPLACE INTO ${CONTACT_PRE_KEYS_TABLE} (
keyId,
identityKeyString,
json
) values (
$keyId,
$identityKeyString,
$json
)`,
{
$keyId: keyId,
$identityKeyString: identityKeyString || '',
$json: objectToJSON(data),
}
);
}
async function getContactPreKeyById(id) {
return getById(CONTACT_PRE_KEYS_TABLE, id);
}
async function getContactPreKeyByIdentityKey(key) {
const row = await db.get(
`SELECT * FROM ${CONTACT_PRE_KEYS_TABLE} WHERE identityKeyString = $identityKeyString ORDER BY keyId DESC LIMIT 1;`,
{
$identityKeyString: key,
}
);
if (!row) {
return null;
}
return jsonToObject(row.json);
}
async function getContactPreKeys(keyId, identityKeyString) {
const query = `SELECT * FROM ${CONTACT_PRE_KEYS_TABLE} WHERE identityKeyString = $identityKeyString AND keyId = $keyId;`;
const rows = await db.all(query, {
$keyId: keyId,
$identityKeyString: identityKeyString,
});
return map(rows, row => jsonToObject(row.json));
}
async function bulkAddContactPreKeys(array) {
return bulkAdd(CONTACT_PRE_KEYS_TABLE, array);
}
async function removeContactPreKeyByIdentityKey(key) {
await db.run(
`DELETE FROM ${CONTACT_PRE_KEYS_TABLE} WHERE identityKeyString = $identityKeyString;`,
{
$identityKeyString: key,
}
);
}
async function removeAllContactPreKeys() {
return removeAllFromTable(CONTACT_PRE_KEYS_TABLE);
}
const CONTACT_SIGNED_PRE_KEYS_TABLE = 'contactSignedPreKeys';
async function createOrUpdateContactSignedPreKey(data) {
const { keyId, identityKeyString } = data;
await db.run(
`INSERT OR REPLACE INTO ${CONTACT_SIGNED_PRE_KEYS_TABLE} (
keyId,
identityKeyString,
json
) values (
$keyId,
$identityKeyString,
$json
)`,
{
$keyId: keyId,
$identityKeyString: identityKeyString || '',
$json: objectToJSON(data),
}
);
}
async function getContactSignedPreKeyById(id) {
return getById(CONTACT_SIGNED_PRE_KEYS_TABLE, id);
}
async function getContactSignedPreKeyByIdentityKey(key) {
const row = await db.get(
`SELECT * FROM ${CONTACT_SIGNED_PRE_KEYS_TABLE} WHERE identityKeyString = $identityKeyString ORDER BY keyId DESC;`,
{
$identityKeyString: key,
}
);
if (!row) {
return null;
}
return jsonToObject(row.json);
}
async function getContactSignedPreKeys(keyId, identityKeyString) {
const query = `SELECT * FROM ${CONTACT_SIGNED_PRE_KEYS_TABLE} WHERE identityKeyString = $identityKeyString AND keyId = $keyId;`;
const rows = await db.all(query, {
$keyId: keyId,
$identityKeyString: identityKeyString,
});
return map(rows, row => jsonToObject(row.json));
}
async function bulkAddContactSignedPreKeys(array) {
return bulkAdd(CONTACT_SIGNED_PRE_KEYS_TABLE, array);
}
async function removeContactSignedPreKeyByIdentityKey(key) {
await db.run(
`DELETE FROM ${CONTACT_SIGNED_PRE_KEYS_TABLE} WHERE identityKeyString = $identityKeyString;`,
{
$identityKeyString: key,
}
);
}
async function removeAllContactSignedPreKeys() {
return removeAllFromTable(CONTACT_SIGNED_PRE_KEYS_TABLE);
}
const SIGNED_PRE_KEYS_TABLE = 'signedPreKeys';
async function createOrUpdateSignedPreKey(data) {
return createOrUpdate(SIGNED_PRE_KEYS_TABLE, data);
}
async function getSignedPreKeyById(id) {
return getById(SIGNED_PRE_KEYS_TABLE, id);
}
async function getAllSignedPreKeys() {
const rows = await db.all('SELECT json FROM signedPreKeys ORDER BY id ASC;');
return map(rows, row => jsonToObject(row.json));
}
async function getAllContactPreKeys() {
const rows = await db.all('SELECT json FROM contactPreKeys ORDER BY id ASC;');
return map(rows, row => jsonToObject(row.json));
}
async function bulkAddSignedPreKeys(array) {
return bulkAdd(SIGNED_PRE_KEYS_TABLE, array);
}
async function removeSignedPreKeyById(id) {
return removeById(SIGNED_PRE_KEYS_TABLE, id);
}
async function removeAllSignedPreKeys() {
return removeAllFromTable(SIGNED_PRE_KEYS_TABLE);
}
@@ -1607,62 +1353,9 @@ async function removeAllItems() {
}
const SESSIONS_TABLE = 'sessions';
async function createOrUpdateSession(data) {
const { id, number } = data;
if (!id) {
throw new Error(
'createOrUpdateSession: Provided data did not have a truthy id'
);
}
if (!number) {
throw new Error(
'createOrUpdateSession: Provided data did not have a truthy number'
);
}
await db.run(
`INSERT OR REPLACE INTO sessions (
id,
number,
json
) values (
$id,
$number,
$json
)`,
{
$id: id,
$number: number,
$json: objectToJSON(data),
}
);
}
async function getSessionById(id) {
return getById(SESSIONS_TABLE, id);
}
async function getSessionsByNumber(number) {
const rows = await db.all('SELECT * FROM sessions WHERE number = $number;', {
$number: number,
});
return map(rows, row => jsonToObject(row.json));
}
async function bulkAddSessions(array) {
return bulkAdd(SESSIONS_TABLE, array);
}
async function removeSessionById(id) {
return removeById(SESSIONS_TABLE, id);
}
async function removeSessionsByNumber(number) {
await db.run('DELETE FROM sessions WHERE number = $number;', {
$number: number,
});
}
async function removeAllSessions() {
return removeAllFromTable(SESSIONS_TABLE);
}
async function getAllSessions() {
return getAllFromTable(SESSIONS_TABLE);
}
async function createOrUpdate(table, data) {
const { id } = data;
@@ -2412,20 +2105,6 @@ async function getMessageBySender({ source, sourceDevice, sent_at }) {
return map(rows, row => jsonToObject(row.json));
}
async function getMessagesBySender({ source, sourceDevice }) {
const rows = await db.all(
`SELECT json FROM ${MESSAGES_TABLE} WHERE
source = $source AND
sourceDevice = $sourceDevice`,
{
$source: source,
$sourceDevice: sourceDevice,
}
);
return map(rows, row => jsonToObject(row.json));
}
async function getAllUnsentMessages() {
const rows = await db.all(`
SELECT json FROM ${MESSAGES_TABLE} WHERE
@@ -2836,43 +2515,10 @@ function getRemoveConfigurationPromises() {
];
}
// Anything that isn't user-visible data
async function removeAllConfiguration() {
let promise;
db.serialize(() => {
promise = Promise.all([
db.run('BEGIN TRANSACTION;'),
...getRemoveConfigurationPromises(),
db.run('COMMIT TRANSACTION;'),
]);
});
await promise;
}
async function removeAllConversations() {
await removeAllFromTable(CONVERSATIONS_TABLE);
}
async function removeAllPrivateConversations() {
await db.run(`DELETE FROM ${CONVERSATIONS_TABLE} WHERE type = 'private'`);
}
async function getMessagesNeedingUpgrade(limit, { maxVersion }) {
const rows = await db.all(
`SELECT json FROM ${MESSAGES_TABLE}
WHERE schemaVersion IS NULL OR schemaVersion < $maxVersion
LIMIT $limit;`,
{
$maxVersion: maxVersion,
$limit: limit,
}
);
return map(rows, row => jsonToObject(row.json));
}
async function getMessagesWithVisualMediaAttachments(
conversationId,
{ limit }
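The table-specific wrappers shown in this file (createOrUpdateIdentityKey, getPreKeyById, removeItemById, and the rest) delegate to a small set of generic helpers over tables that store one JSON blob per id. A minimal reconstruction of that helper layer based on the fragments above; the declare lines stand in for the promisified SQLite handle and JSON helpers that app/sql.js already defines, and the exact SQL may differ:

// Sketch reconstructed from the fragments above; not the exact implementation.
declare const db: {
  run(query: string, params?: Record<string, unknown>): Promise<void>;
  get(query: string, params?: Record<string, unknown>): Promise<any>;
};
declare function objectToJSON(data: unknown): string;
declare function jsonToObject(json: string): any;

// Generic "one JSON blob per id" helpers that the table-specific wrappers delegate to.
async function createOrUpdate(table: string, data: { id: string }): Promise<void> {
  const { id } = data;
  if (!id) {
    throw new Error('createOrUpdate: Provided data did not have a truthy id');
  }
  await db.run(
    `INSERT OR REPLACE INTO ${table} (id, json) values ($id, $json);`,
    { $id: id, $json: objectToJSON(data) }
  );
}

async function getById(table: string, id: string): Promise<any | null> {
  const row = await db.get(`SELECT * FROM ${table} WHERE id = $id;`, { $id: id });
  return row ? jsonToObject(row.json) : null;
}

async function removeById(table: string, id: string): Promise<void> {
  await db.run(`DELETE FROM ${table} WHERE id = $id;`, { $id: id });
}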


@@ -67,19 +67,6 @@
// of preload.js processing
window.setImmediate = window.nodeSetImmediate;
const { IdleDetector, MessageDataMigrator } = Signal.Workflow;
const {
mandatoryMessageUpgrade,
migrateAllToSQLCipher,
removeDatabase,
runMigrations,
doesDatabaseExist,
} = Signal.IndexedDB;
const { Message } = window.Signal.Types;
const {
upgradeMessageSchema,
writeNewAttachmentData,
} = window.Signal.Migrations;
const { Views } = window.Signal;
// Implicitly used in `indexeddb-backbonejs-adapter`:
@@ -100,7 +87,6 @@
}, 2000);
}
let idleDetector;
let initialLoadComplete = false;
let newVersion = false;
@@ -133,13 +119,6 @@
const cancelInitializationMessage = Views.Initialization.setMessage();
const isIndexedDBPresent = await doesDatabaseExist();
if (isIndexedDBPresent) {
window.installStorage(window.legacyStorage);
window.log.info('Start IndexedDB migrations');
await runMigrations();
}
window.log.info('Storage fetch');
storage.fetch();
@@ -148,12 +127,7 @@
if (specialConvInited) {
return;
}
const publicConversations = await window.Signal.Data.getAllPublicConversations(
{
ConversationCollection:
window.models.Conversation.ConversationCollection,
}
);
const publicConversations = await window.Signal.Data.getAllPublicConversations();
publicConversations.forEach(conversation => {
// weird, but creating the object does everything we need
conversation.getPublicSendData();
@@ -262,9 +236,6 @@
shutdown: async () => {
// Stop background processing
window.Signal.AttachmentDownloads.stop();
if (idleDetector) {
idleDetector.stop();
}
// Stop processing incoming messages
if (messageReceiver) {
@@ -292,58 +263,10 @@
await window.Signal.Logs.deleteAll();
}
if (isIndexedDBPresent) {
await mandatoryMessageUpgrade({ upgradeMessageSchema });
await migrateAllToSQLCipher({ writeNewAttachmentData, Views });
await removeDatabase();
try {
await window.Signal.Data.removeIndexedDBFiles();
} catch (error) {
window.log.error(
'Failed to remove IndexedDB files:',
error && error.stack ? error.stack : error
);
}
window.installStorage(window.newStorage);
await window.storage.fetch();
await storage.put('indexeddb-delete-needed', true);
}
Views.Initialization.setMessage(window.i18n('optimizingApplication'));
Views.Initialization.setMessage(window.i18n('loading'));
idleDetector = new IdleDetector();
let isMigrationWithIndexComplete = false;
window.log.info(
`Starting background data migration. Target version: ${Message.CURRENT_SCHEMA_VERSION}`
);
idleDetector.on('idle', async () => {
const NUM_MESSAGES_PER_BATCH = 1;
if (!isMigrationWithIndexComplete) {
const batchWithIndex = await MessageDataMigrator.processNext({
BackboneMessage: window.models.Message.MessageModel,
BackboneMessageCollection: window.models.Message.MessageCollection,
numMessagesPerBatch: NUM_MESSAGES_PER_BATCH,
upgradeMessageSchema,
getMessagesNeedingUpgrade:
window.Signal.Data.getMessagesNeedingUpgrade,
saveMessage: window.Signal.Data.saveMessage,
});
window.log.info('Upgrade message schema (with index):', batchWithIndex);
isMigrationWithIndexComplete = batchWithIndex.done;
}
if (isMigrationWithIndexComplete) {
window.log.info(
'Background migration complete. Stopping idle detector.'
);
idleDetector.stop();
}
});
const themeSetting = window.Events.getThemeSetting();
const newThemeSetting = mapOldThemeToNew(themeSetting);
window.Events.setThemeSetting(newThemeSetting);
@@ -351,7 +274,6 @@
try {
await Promise.all([
window.getConversationController().load(),
textsecure.storage.protocol.hydrateCaches(),
BlockedNumberController.load(),
]);
} catch (error) {
@@ -706,66 +628,6 @@
window.setMediaPermissions(!value);
};
// Attempts a connection to an open group server
window.attemptConnection = async (serverURL, channelId) => {
let completeServerURL = serverURL.toLowerCase();
const valid = window.libsession.Types.OpenGroup.validate(
completeServerURL
);
if (!valid) {
return new Promise((_resolve, reject) => {
reject(window.i18n('connectToServerFail'));
});
}
// Add http or https prefix to server
completeServerURL = window.libsession.Types.OpenGroup.prefixify(
completeServerURL
);
const rawServerURL = serverURL
.replace(/^https?:\/\//i, '')
.replace(/[/\\]+$/i, '');
const conversationId = `publicChat:${channelId}@${rawServerURL}`;
// Quickly peek to make sure we don't already have it
const conversationExists = window
.getConversationController()
.get(conversationId);
if (conversationExists) {
// We are already a member of this public chat
return new Promise((_resolve, reject) => {
reject(window.i18n('publicChatExists'));
});
}
// Get server
const serverAPI = await window.lokiPublicChatAPI.findOrCreateServer(
completeServerURL
);
// SSL certificate failure or offline
if (!serverAPI) {
// Url incorrect or server not compatible
return new Promise((_resolve, reject) => {
reject(window.i18n('connectToServerFail'));
});
}
// Create conversation
const conversation = await window
.getConversationController()
.getOrCreateAndWait(conversationId, 'group');
// Convert conversation to a public one
await conversation.setPublicSource(completeServerURL, channelId);
// and finally activate it
conversation.getPublicSendData(); // may want "await" if you want to use the API
return conversation;
};
Whisper.events.on('updateGroupName', async groupConvo => {
if (appView) {
appView.showUpdateGroupNameDialog(groupConvo);
@@ -1046,10 +908,6 @@
});
window.textsecure.messaging = true;
storage.onready(async () => {
idleDetector.start();
});
}
function onEmpty() {
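Several call sites above (getAllPublicConversations here, and getAllConversations / getAllGroupsInvolvingId in the files below) stop passing a { ConversationCollection } option, which suggests the new TypeScript data layer wraps rows into the collection itself. A hedged sketch of one plausible shape; the channels IPC proxy is an assumption that does not appear in this diff, only the window.models.Conversation.ConversationCollection path is shown above:

// Sketch only: how ts/data/data.ts might return a ready-made collection so
// callers no longer need to pass ConversationCollection in.
declare const channels: { getAllPublicConversations(): Promise<Array<any>> }; // assumed IPC proxy

async function getAllPublicConversations(): Promise<any> {
  const rows = await channels.getAllPublicConversations();
  const collection = new (window as any).models.Conversation.ConversationCollection();
  collection.add(rows);
  return collection;
}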


@@ -7,8 +7,6 @@
(function() {
'use strict';
const { getPlaceholderMigrations } = window.Signal.Migrations;
window.Whisper = window.Whisper || {};
window.Whisper.Database = window.Whisper.Database || {};
window.Whisper.Database.id = window.Whisper.Database.id || 'loki-messenger';
@@ -125,6 +123,4 @@
request.onsuccess = resolve;
});
Whisper.Database.migrations = getPlaceholderMigrations();
})();


@@ -43,11 +43,7 @@
}
const groups = await window.Signal.Data.getAllGroupsInvolvingId(
originalSource,
{
ConversationCollection:
window.models.Conversation.ConversationCollection,
}
originalSource
);
const ids = groups.pluck('id');


@@ -10,7 +10,7 @@ const {
saveAttachmentDownloadJob,
saveMessage,
setAttachmentDownloadJobPending,
} = require('./data');
} = require('../../ts/data/data');
const { stringFromBytes } = require('./crypto');
module.exports = {


@@ -141,9 +141,7 @@ async function exportConversationList(fileWriter) {
stream.write('{');
stream.write('"conversations": ');
const conversations = await window.Signal.Data.getAllConversations({
ConversationCollection: window.models.Conversation.ConversationCollection,
});
const conversations = await window.Signal.Data.getAllConversations();
window.log.info(`Exporting ${conversations.length} conversations`);
writeArray(stream, getPlainJS(conversations));
@@ -257,11 +255,7 @@ async function importFromJsonString(jsonString, targetPath, options) {
await importConversationsFromJSON(conversations, options);
const SAVE_FUNCTIONS = {
identityKeys: window.Signal.Data.createOrUpdateIdentityKey,
items: window.Signal.Data.createOrUpdateItem,
preKeys: window.Signal.Data.createOrUpdatePreKey,
sessions: window.Signal.Data.createOrUpdateSession,
signedPreKeys: window.Signal.Data.createOrUpdateSignedPreKey,
};
await Promise.all(
@@ -839,9 +833,7 @@ async function exportConversations(options) {
throw new Error('Need an attachments directory!');
}
const collection = await window.Signal.Data.getAllConversations({
ConversationCollection: window.models.Conversation.ConversationCollection,
});
const collection = await window.Signal.Data.getAllConversations();
const conversations = collection.models;
for (let i = 0, max = conversations.length; i < max; i += 1) {

js/modules/data.d.ts (vendored, 425 lines)

@@ -1,425 +0,0 @@
import { KeyPair } from '../../libtextsecure/libsignal-protocol';
import { MessageCollection } from '../../ts/models/message';
import { HexKeyPair } from '../../ts/receiver/closedGroups';
import { ECKeyPair } from '../../ts/receiver/keypairs';
import { PubKey } from '../../ts/session/types';
import { ConversationType } from '../../ts/state/ducks/conversations';
import { Message } from '../../ts/types/Message';
export type IdentityKey = {
id: string;
publicKey: ArrayBuffer;
firstUse: boolean;
nonblockingApproval: boolean;
secretKey?: string; // found in medium groups
};
export type PreKey = {
id: number;
publicKey: ArrayBuffer;
privateKey: ArrayBuffer;
recipient: string;
};
export type SignedPreKey = {
id: number;
publicKey: ArrayBuffer;
privateKey: ArrayBuffer;
created_at: number;
confirmed: boolean;
signature: ArrayBuffer;
};
export type ContactPreKey = {
id: number;
identityKeyString: string;
publicKey: ArrayBuffer;
keyId: number;
};
export type ContactSignedPreKey = {
id: number;
identityKeyString: string;
publicKey: ArrayBuffer;
keyId: number;
signature: ArrayBuffer;
created_at: number;
confirmed: boolean;
};
export type GuardNode = {
ed25519PubKey: string;
};
export type SwarmNode = {
address: string;
ip: string;
port: string;
pubkey_ed25519: string;
pubkey_x25519: string;
};
export type StorageItem = {
id: string;
value: any;
};
export type SessionDataInfo = {
id: string;
number: string;
deviceId: number;
record: string;
};
export type ServerToken = {
serverUrl: string;
token: string;
};
// Basic
export function searchMessages(query: string): Promise<Array<any>>;
export function searchConversations(query: string): Promise<Array<any>>;
export function shutdown(): Promise<void>;
export function close(): Promise<void>;
export function removeDB(): Promise<void>;
export function removeIndexedDBFiles(): Promise<void>;
export function getPasswordHash(): Promise<string | null>;
// Identity Keys
// TODO: identity key has different shape depending on how it is called,
// so we need to come up with a way to make TS work with all of them
export function createOrUpdateIdentityKey(data: any): Promise<void>;
export function getIdentityKeyById(id: string): Promise<IdentityKey | null>;
export function bulkAddIdentityKeys(array: Array<IdentityKey>): Promise<void>;
export function removeIdentityKeyById(id: string): Promise<void>;
export function removeAllIdentityKeys(): Promise<void>;
// Pre Keys
export function createOrUpdatePreKey(data: PreKey): Promise<void>;
export function getPreKeyById(id: number): Promise<PreKey | null>;
export function getPreKeyByRecipient(recipient: string): Promise<PreKey | null>;
export function bulkAddPreKeys(data: Array<PreKey>): Promise<void>;
export function removePreKeyById(id: number): Promise<void>;
export function getAllPreKeys(): Promise<Array<PreKey>>;
// Signed Pre Keys
export function createOrUpdateSignedPreKey(data: SignedPreKey): Promise<void>;
export function getSignedPreKeyById(id: number): Promise<SignedPreKey | null>;
export function getAllSignedPreKeys(): Promise<SignedPreKey | null>;
export function bulkAddSignedPreKeys(array: Array<SignedPreKey>): Promise<void>;
export function removeSignedPreKeyById(id: number): Promise<void>;
export function removeAllSignedPreKeys(): Promise<void>;
// Contact Pre Key
export function createOrUpdateContactPreKey(data: ContactPreKey): Promise<void>;
export function getContactPreKeyById(id: number): Promise<ContactPreKey | null>;
export function getContactPreKeyByIdentityKey(
key: string
): Promise<ContactPreKey | null>;
export function getContactPreKeys(
keyId: number,
identityKeyString: string
): Promise<Array<ContactPreKey>>;
export function getAllContactPreKeys(): Promise<Array<ContactPreKey>>;
export function bulkAddContactPreKeys(
array: Array<ContactPreKey>
): Promise<void>;
export function removeContactPreKeyByIdentityKey(id: number): Promise<void>;
export function removeAllContactPreKeys(): Promise<void>;
// Contact Signed Pre Key
export function createOrUpdateContactSignedPreKey(
data: ContactSignedPreKey
): Promise<void>;
export function getContactSignedPreKeyById(
id: number
): Promise<ContactSignedPreKey | null>;
export function getContactSignedPreKeyByIdentityKey(
key: string
): Promise<ContactSignedPreKey | null>;
export function getContactSignedPreKeys(
keyId: number,
identityKeyString: string
): Promise<Array<ContactSignedPreKey>>;
export function bulkAddContactSignedPreKeys(
array: Array<ContactSignedPreKey>
): Promise<void>;
export function removeContactSignedPreKeyByIdentityKey(
id: string
): Promise<void>;
export function removeAllContactSignedPreKeys(): Promise<void>;
// Guard Nodes
export function getGuardNodes(): Promise<Array<GuardNode>>;
export function updateGuardNodes(nodes: Array<string>): Promise<void>;
// Storage Items
export function createOrUpdateItem(data: StorageItem): Promise<void>;
export function getItemById(id: string): Promise<StorageItem | undefined>;
export function getAllItems(): Promise<Array<StorageItem>>;
export function bulkAddItems(array: Array<StorageItem>): Promise<void>;
export function removeItemById(id: string): Promise<void>;
export function removeAllItems(): Promise<void>;
// Sessions
export function createOrUpdateSession(data: SessionDataInfo): Promise<void>;
export function getAllSessions(): Promise<Array<SessionDataInfo>>;
export function getSessionById(id: string): Promise<SessionDataInfo>;
export function getSessionsByNumber(number: string): Promise<SessionDataInfo>;
export function bulkAddSessions(array: Array<SessionDataInfo>): Promise<void>;
export function removeSessionById(id: string): Promise<void>;
export function removeSessionsByNumber(number: string): Promise<void>;
export function removeAllSessions(): Promise<void>;
// Conversations
export function getConversationCount(): Promise<number>;
export function saveConversation(data: ConversationType): Promise<void>;
export function saveConversations(data: Array<ConversationType>): Promise<void>;
export function updateConversation(
id: string,
data: ConversationType,
{ Conversation }
): Promise<void>;
export function removeConversation(id: string, { Conversation }): Promise<void>;
export function getAllConversations({
ConversationCollection,
}: {
ConversationCollection: any;
}): Promise<ConversationCollection>;
export function getAllConversationIds(): Promise<Array<string>>;
export function getPublicConversationsByServer(
server: string,
{ ConversationCollection }: { ConversationCollection: any }
): Promise<ConversationCollection>;
export function getPubkeysInPublicConversation(
id: string
): Promise<Array<string>>;
export function savePublicServerToken(data: ServerToken): Promise<void>;
export function getPublicServerTokenByServerUrl(
serverUrl: string
): Promise<string>;
export function getAllGroupsInvolvingId(
id: string,
{ ConversationCollection }: { ConversationCollection: any }
): Promise<ConversationCollection>;
// Returns conversation row
// TODO: Make strict return types for search
export function searchConversations(query: string): Promise<any>;
export function searchMessages(query: string): Promise<any>;
export function searchMessagesInConversation(
query: string,
conversationId: string,
{ limit }?: { limit: any }
): Promise<any>;
export function saveMessage(
data: Message,
{ forceSave, Message }?: { forceSave?: any; Message?: any }
): Promise<string>;
export function cleanSeenMessages(): Promise<void>;
export function cleanLastHashes(): Promise<void>;
export function saveSeenMessageHash(data: {
expiresAt: number;
hash: string;
}): Promise<void>;
export function getSwarmNodesForPubkey(pubkey: string): Promise<Array<string>>;
export function updateSwarmNodesForPubkey(
pubkey: string,
snodeEdKeys: Array<string>
): Promise<void>;
// TODO: Strictly type the following
export function updateLastHash(data: any): Promise<any>;
export function saveSeenMessageHashes(data: any): Promise<any>;
export function saveLegacyMessage(data: any): Promise<any>;
export function saveMessages(
arrayOfMessages: any,
{ forceSave }?: any
): Promise<any>;
export function removeMessage(id: string, { Message }?: any): Promise<any>;
export function getUnreadByConversation(
conversationId: string,
{ MessageCollection }?: any
): Promise<any>;
export function getUnreadCountByConversation(
conversationId: string
): Promise<any>;
export function removeAllMessagesInConversation(
conversationId: string,
{ MessageCollection }?: any
): Promise<void>;
export function getMessageBySender(
{
source,
sourceDevice,
sent_at,
}: { source: any; sourceDevice: any; sent_at: any },
{ Message }: { Message: any }
): Promise<any>;
export function getMessagesBySender(
{ source, sourceDevice }: { source: any; sourceDevice: any },
{ Message }: { Message: any }
): Promise<MessageCollection>;
export function getMessageIdsFromServerIds(
serverIds: any,
conversationId: any
): Promise<any>;
export function getMessageById(
id: string,
{ Message }: { Message: any }
): Promise<any>;
export function getAllMessages({
MessageCollection,
}: {
MessageCollection: any;
}): Promise<any>;
export function getAllUnsentMessages({
MessageCollection,
}: {
MessageCollection: any;
}): Promise<any>;
export function getAllMessageIds(): Promise<any>;
export function getMessagesBySentAt(
sentAt: any,
{ MessageCollection }: { MessageCollection: any }
): Promise<any>;
export function getExpiredMessages({
MessageCollection,
}: {
MessageCollection: any;
}): Promise<any>;
export function getOutgoingWithoutExpiresAt({
MessageCollection,
}: any): Promise<any>;
export function getNextExpiringMessage({
MessageCollection,
}: {
MessageCollection: any;
}): Promise<any>;
export function getMessagesByConversation(
conversationId: any,
{
limit,
receivedAt,
MessageCollection,
type,
}: {
limit?: number;
receivedAt?: number;
MessageCollection: any;
type?: string;
}
): Promise<any>;
export function getSeenMessagesByHashList(hashes: any): Promise<any>;
export function getLastHashBySnode(convoId: any, snode: any): Promise<any>;
// Unprocessed
export function getUnprocessedCount(): Promise<any>;
export function getAllUnprocessed(): Promise<any>;
export function getUnprocessedById(id: any): Promise<any>;
export function saveUnprocessed(
data: any,
{
forceSave,
}?: {
forceSave: any;
}
): Promise<any>;
export function saveUnprocesseds(
arrayOfUnprocessed: any,
{
forceSave,
}?: {
forceSave: any;
}
): Promise<void>;
export function updateUnprocessedAttempts(
id: any,
attempts: any
): Promise<void>;
export function updateUnprocessedWithData(id: any, data: any): Promise<void>;
export function removeUnprocessed(id: any): Promise<void>;
export function removeAllUnprocessed(): Promise<void>;
// Attachment Downloads
export function getNextAttachmentDownloadJobs(limit: any): Promise<any>;
export function saveAttachmentDownloadJob(job: any): Promise<void>;
export function setAttachmentDownloadJobPending(
id: any,
pending: any
): Promise<void>;
export function resetAttachmentDownloadPending(): Promise<void>;
export function removeAttachmentDownloadJob(id: any): Promise<void>;
export function removeAllAttachmentDownloadJobs(): Promise<void>;
// Other
export function removeAll(): Promise<void>;
export function removeAllConfiguration(): Promise<void>;
export function removeAllConversations(): Promise<void>;
export function removeAllPrivateConversations(): Promise<void>;
export function removeOtherData(): Promise<void>;
export function cleanupOrphanedAttachments(): Promise<void>;
// Getters
export function getMessagesNeedingUpgrade(
limit: any,
{
maxVersion,
}: {
maxVersion?: number;
}
): Promise<any>;
export function getLegacyMessagesNeedingUpgrade(
limit: any,
{
maxVersion,
}: {
maxVersion?: number;
}
): Promise<any>;
export function getMessagesWithVisualMediaAttachments(
conversationId: any,
{
limit,
}: {
limit: any;
}
): Promise<any>;
export function getMessagesWithFileAttachments(
conversationId: any,
{
limit,
}: {
limit: any;
}
): Promise<any>;
// Sender Keys
export function removeAllClosedGroupRatchets(groupId: string): Promise<void>;
export function getAllEncryptionKeyPairsForGroup(
groupPublicKey: string | PubKey
): Promise<Array<HexKeyPair> | undefined>;
export function isKeyPairAlreadySaved(
groupPublicKey: string,
keypair: HexKeyPair
): Promise<boolean>;
export function getLatestClosedGroupEncryptionKeyPair(
groupPublicKey: string
): Promise<HexKeyPair | undefined>;
export function addClosedGroupEncryptionKeyPair(
groupPublicKey: string,
keypair: HexKeyPair
): Promise<void>;
export function removeAllClosedGroupEncryptionKeyPairs(
groupPublicKey: string
): Promise<void>;
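The file deleted above was only an ambient declaration layer; the implementations now live in ts/data/data.ts (the require path '../../ts/data/data' appears in the attachment download job hunk above). A small usage sketch against the StorageItem-based item API declared above; the import path and the example key are illustrative, and real callers typically go through window.Signal.Data:

// Illustrative only: exercises createOrUpdateItem / getItemById as declared above.
import { createOrUpdateItem, getItemById } from '../../ts/data/data';

async function rememberValue(key: string, value: unknown): Promise<void> {
  await createOrUpdateItem({ id: key, value });
}

async function readValue(key: string): Promise<unknown> {
  const item = await getItemById(key);
  return item ? item.value : undefined;
}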

File diff suppressed because it is too large


@@ -1,60 +0,0 @@
/* eslint-env browser */
const EventEmitter = require('events');
const POLL_INTERVAL_MS = 5 * 1000;
const IDLE_THRESHOLD_MS = 20;
class IdleDetector extends EventEmitter {
constructor() {
super();
this.handle = null;
this.timeoutId = null;
}
start() {
window.log.info('Start idle detector');
this._scheduleNextCallback();
}
stop() {
if (!this.handle) {
return;
}
window.log.info('Stop idle detector');
this._clearScheduledCallbacks();
}
_clearScheduledCallbacks() {
if (this.handle) {
cancelIdleCallback(this.handle);
this.handle = null;
}
if (this.timeoutId) {
clearTimeout(this.timeoutId);
this.timeoutId = null;
}
}
_scheduleNextCallback() {
this._clearScheduledCallbacks();
this.handle = window.requestIdleCallback(deadline => {
const { didTimeout } = deadline;
const timeRemaining = deadline.timeRemaining();
const isIdle = timeRemaining >= IDLE_THRESHOLD_MS;
this.timeoutId = setTimeout(
() => this._scheduleNextCallback(),
POLL_INTERVAL_MS
);
if (isIdle || didTimeout) {
this.emit('idle', { timestamp: Date.now(), didTimeout, timeRemaining });
}
});
}
}
module.exports = {
IdleDetector,
};
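For reference, this is roughly how the background.js code removed above drove this class; processOneBatch is a stand-in for the MessageDataMigrator.processNext call that used to run in the idle handler, and the './idle_detector' path assumes a sibling module (background.js actually obtained the class from Signal.Workflow):

// Usage pattern taken from the removed background.js code: do migration work
// only while the renderer is idle, and stop once a batch reports done.
const { IdleDetector } = require('./idle_detector');
declare function processOneBatch(): Promise<{ done: boolean }>; // stand-in

const idleDetector = new IdleDetector();
idleDetector.on('idle', async () => {
  const batch = await processOneBatch();
  if (batch.done) {
    idleDetector.stop();
  }
});
idleDetector.start();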


@@ -1,168 +0,0 @@
/* global window, Whisper, textsecure */
const { isFunction } = require('lodash');
const MessageDataMigrator = require('./messages_data_migrator');
const {
run,
getLatestVersion,
getDatabase,
} = require('./migrations/migrations');
const MESSAGE_MINIMUM_VERSION = 7;
module.exports = {
doesDatabaseExist,
mandatoryMessageUpgrade,
MESSAGE_MINIMUM_VERSION,
migrateAllToSQLCipher,
removeDatabase,
runMigrations,
};
async function runMigrations() {
window.log.info('Run migrations on database with attachment data');
await run({
Backbone: window.Backbone,
logger: window.log,
});
Whisper.Database.migrations[0].version = getLatestVersion();
}
async function mandatoryMessageUpgrade({ upgradeMessageSchema } = {}) {
if (!isFunction(upgradeMessageSchema)) {
throw new Error(
'mandatoryMessageUpgrade: upgradeMessageSchema must be a function!'
);
}
const NUM_MESSAGES_PER_BATCH = 10;
window.log.info(
'upgradeMessages: Mandatory message schema upgrade started.',
`Target version: ${MESSAGE_MINIMUM_VERSION}`
);
let isMigrationWithoutIndexComplete = false;
while (!isMigrationWithoutIndexComplete) {
const database = getDatabase();
// eslint-disable-next-line no-await-in-loop
const batchWithoutIndex = await MessageDataMigrator.processNextBatchWithoutIndex(
{
databaseName: database.name,
minDatabaseVersion: database.version,
numMessagesPerBatch: NUM_MESSAGES_PER_BATCH,
upgradeMessageSchema,
maxVersion: MESSAGE_MINIMUM_VERSION,
BackboneMessage: window.models.Message.MessageModel,
saveMessage: window.Signal.Data.saveLegacyMessage,
}
);
window.log.info(
'upgradeMessages: upgrade without index',
batchWithoutIndex
);
isMigrationWithoutIndexComplete = batchWithoutIndex.done;
}
window.log.info('upgradeMessages: upgrade without index complete!');
let isMigrationWithIndexComplete = false;
while (!isMigrationWithIndexComplete) {
// eslint-disable-next-line no-await-in-loop
const batchWithIndex = await MessageDataMigrator.processNext({
BackboneMessage: window.models.Message.MessageModel,
BackboneMessageCollection: window.models.Message.MessageCollection,
numMessagesPerBatch: NUM_MESSAGES_PER_BATCH,
upgradeMessageSchema,
getMessagesNeedingUpgrade:
window.Signal.Data.getLegacyMessagesNeedingUpgrade,
saveMessage: window.Signal.Data.saveLegacyMessage,
maxVersion: MESSAGE_MINIMUM_VERSION,
});
window.log.info('upgradeMessages: upgrade with index', batchWithIndex);
isMigrationWithIndexComplete = batchWithIndex.done;
}
window.log.info('upgradeMessages: upgrade with index complete!');
window.log.info('upgradeMessages: Message schema upgrade complete');
}
async function migrateAllToSQLCipher({ writeNewAttachmentData, Views } = {}) {
if (!isFunction(writeNewAttachmentData)) {
throw new Error(
'migrateAllToSQLCipher: writeNewAttachmentData must be a function'
);
}
if (!Views) {
throw new Error('migrateAllToSQLCipher: Views must be provided!');
}
let totalMessages;
const db = await Whisper.Database.open();
function showMigrationStatus(current) {
const status = `${current}/${totalMessages}`;
Views.Initialization.setMessage(
window.i18n('migratingToSQLCipher', [status])
);
}
try {
totalMessages = await MessageDataMigrator.getNumMessages({
connection: db,
});
} catch (error) {
window.log.error(
'background.getNumMessages error:',
error && error.stack ? error.stack : error
);
totalMessages = 0;
}
if (totalMessages) {
window.log.info(`About to migrate ${totalMessages} messages`);
showMigrationStatus(0);
} else {
window.log.info('About to migrate non-messages');
}
await window.Signal.migrateToSQL({
db,
clearStores: Whisper.Database.clearStores,
handleDOMException: Whisper.Database.handleDOMException,
arrayBufferToString: textsecure.MessageReceiver.arrayBufferToStringBase64,
countCallback: count => {
window.log.info(`Migration: ${count} messages complete`);
showMigrationStatus(count);
},
writeNewAttachmentData,
});
db.close();
}
async function doesDatabaseExist() {
return new Promise((resolve, reject) => {
const { id } = Whisper.Database;
const req = window.indexedDB.open(id);
let existed = true;
req.onerror = reject;
req.onsuccess = () => {
req.result.close();
resolve(existed);
};
req.onupgradeneeded = () => {
if (req.result.version === 1) {
existed = false;
window.indexedDB.deleteDatabase(id);
}
};
});
}
function removeDatabase() {
window.log.info(`Deleting IndexedDB database '${Whisper.Database.id}'`);
window.indexedDB.deleteDatabase(Whisper.Database.id);
}
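Condensed from the startup and first-run code removed from background.js above, this is how the exports of this deleted module were used together; the wrapper function name is illustrative, and everything inside it comes from the removed hunks:

// Condensed from the removed background.js code: the legacy IndexedDB store
// was migrated to SQL (and then deleted) only when it still existed.
const w = window as any;
const {
  mandatoryMessageUpgrade,
  migrateAllToSQLCipher,
  removeDatabase,
  runMigrations,
  doesDatabaseExist,
} = w.Signal.IndexedDB;
const { upgradeMessageSchema, writeNewAttachmentData } = w.Signal.Migrations;
const { Views } = w.Signal;

async function migrateLegacyIndexedDbIfPresent(): Promise<void> {
  if (!(await doesDatabaseExist())) {
    return;
  }
  w.installStorage(w.legacyStorage);
  await runMigrations();
  // ...and later in the same startup sequence:
  await mandatoryMessageUpgrade({ upgradeMessageSchema });
  await migrateAllToSQLCipher({ writeNewAttachmentData, Views });
  await removeDatabase();
  await w.Signal.Data.removeIndexedDBFiles();
  w.installStorage(w.newStorage);
}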


@@ -1080,8 +1080,12 @@ class LokiPublicChannelAPI {
async getPrivateKey() {
if (!this.myPrivateKey) {
const myKeyPair = await textsecure.storage.protocol.getIdentityKeyPair();
this.myPrivateKey = myKeyPair.privKey;
const item = await window.Signal.Data.getItemById('identityKey');
const keyPair = (item && item.value) || undefined;
if (!keyPair) {
window.log.error('Could not get our Keypair from getItemById');
}
this.myPrivateKey = keyPair.privKey;
}
return this.myPrivateKey;
}
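The replacement above reads the key pair from the items table instead of textsecure.storage.protocol. The same lookup as a small guarded helper; the helper name is ours, while the 'identityKey' item id and the value.privKey shape come from the hunk above:

// Sketch of the lookup introduced above, with an early return so a missing
// item does not throw when .privKey is read. Helper name is illustrative.
async function getOurPrivateKey(): Promise<ArrayBuffer | undefined> {
  const item = await (window as any).Signal.Data.getItemById('identityKey');
  const keyPair = (item && item.value) || undefined;
  if (!keyPair) {
    (window as any).log.error('Could not get our Keypair from getItemById');
    return undefined;
  }
  return keyPair.privKey;
}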


@@ -1,405 +0,0 @@
// Module to upgrade the schema of messages, e.g. migrate attachments to disk.
// `dangerouslyProcessAllWithoutIndex` purposely doesn't rely on our Backbone
// IndexedDB adapter to prevent automatic migrations. Rather, it uses direct
// IndexedDB access. This includes avoiding usage of `storage` module which uses
// Backbone under the hood.
/* global IDBKeyRange, window */
const { isFunction, isNumber, isObject, isString, last } = require('lodash');
const database = require('./database');
const Message = require('./types/message');
const settings = require('./settings');
const MESSAGES_STORE_NAME = 'messages';
exports.processNext = async ({
BackboneMessage,
BackboneMessageCollection,
numMessagesPerBatch,
upgradeMessageSchema,
getMessagesNeedingUpgrade,
saveMessage,
maxVersion = Message.CURRENT_SCHEMA_VERSION,
} = {}) => {
if (!isFunction(BackboneMessage)) {
throw new TypeError(
"'BackboneMessage' (MessageModel) constructor is required"
);
}
if (!isFunction(BackboneMessageCollection)) {
throw new TypeError(
"'BackboneMessageCollection' (window.models.Message.MessageCollection)" +
' constructor is required'
);
}
if (!isNumber(numMessagesPerBatch)) {
throw new TypeError("'numMessagesPerBatch' is required");
}
if (!isFunction(upgradeMessageSchema)) {
throw new TypeError("'upgradeMessageSchema' is required");
}
const startTime = Date.now();
const fetchStartTime = Date.now();
let messagesRequiringSchemaUpgrade;
try {
messagesRequiringSchemaUpgrade = await getMessagesNeedingUpgrade(
numMessagesPerBatch,
{
maxVersion,
MessageCollection: BackboneMessageCollection,
}
);
} catch (error) {
window.log.error(
'processNext error:',
error && error.stack ? error.stack : error
);
return {
done: true,
numProcessed: 0,
};
}
const fetchDuration = Date.now() - fetchStartTime;
const upgradeStartTime = Date.now();
const upgradedMessages = await Promise.all(
messagesRequiringSchemaUpgrade.map(message =>
upgradeMessageSchema(message, { maxVersion })
)
);
const upgradeDuration = Date.now() - upgradeStartTime;
const saveStartTime = Date.now();
await Promise.all(
upgradedMessages.map(message =>
saveMessage(message, { Message: BackboneMessage })
)
);
const saveDuration = Date.now() - saveStartTime;
const totalDuration = Date.now() - startTime;
const numProcessed = messagesRequiringSchemaUpgrade.length;
const done = numProcessed < numMessagesPerBatch;
return {
done,
numProcessed,
fetchDuration,
upgradeDuration,
saveDuration,
totalDuration,
};
};
exports.dangerouslyProcessAllWithoutIndex = async ({
databaseName,
minDatabaseVersion,
numMessagesPerBatch,
upgradeMessageSchema,
logger,
maxVersion = Message.CURRENT_SCHEMA_VERSION,
saveMessage,
BackboneMessage,
} = {}) => {
if (!isString(databaseName)) {
throw new TypeError("'databaseName' must be a string");
}
if (!isNumber(minDatabaseVersion)) {
throw new TypeError("'minDatabaseVersion' must be a number");
}
if (!isNumber(numMessagesPerBatch)) {
throw new TypeError("'numMessagesPerBatch' must be a number");
}
if (!isFunction(upgradeMessageSchema)) {
throw new TypeError("'upgradeMessageSchema' is required");
}
if (!isFunction(BackboneMessage)) {
throw new TypeError("'BackboneMessage' is required");
}
if (!isFunction(saveMessage)) {
throw new TypeError("'saveMessage' is required");
}
const connection = await database.open(databaseName);
const databaseVersion = connection.version;
const isValidDatabaseVersion = databaseVersion >= minDatabaseVersion;
logger.info('Database status', {
databaseVersion,
isValidDatabaseVersion,
minDatabaseVersion,
});
if (!isValidDatabaseVersion) {
throw new Error(
`Expected database version (${databaseVersion})` +
` to be at least ${minDatabaseVersion}`
);
}
// NOTE: Even if we make this async using `then`, requesting `count` on an
// IndexedDB store blocks all subsequent transactions, so we might as well
// explicitly wait for it here:
const numTotalMessages = await exports.getNumMessages({ connection });
const migrationStartTime = Date.now();
let numCumulativeMessagesProcessed = 0;
// eslint-disable-next-line no-constant-condition
while (true) {
// eslint-disable-next-line no-await-in-loop
const status = await _processBatch({
connection,
numMessagesPerBatch,
upgradeMessageSchema,
maxVersion,
saveMessage,
BackboneMessage,
});
if (status.done) {
break;
}
numCumulativeMessagesProcessed += status.numMessagesProcessed;
logger.info(
'Upgrade message schema:',
Object.assign({}, status, {
numTotalMessages,
numCumulativeMessagesProcessed,
})
);
}
logger.info('Close database connection');
connection.close();
const totalDuration = Date.now() - migrationStartTime;
logger.info('Attachment migration complete:', {
totalDuration,
totalMessagesProcessed: numCumulativeMessagesProcessed,
});
};
exports.processNextBatchWithoutIndex = async ({
databaseName,
minDatabaseVersion,
numMessagesPerBatch,
upgradeMessageSchema,
maxVersion,
BackboneMessage,
saveMessage,
} = {}) => {
if (!isFunction(upgradeMessageSchema)) {
throw new TypeError("'upgradeMessageSchema' is required");
}
const connection = await _getConnection({ databaseName, minDatabaseVersion });
const batch = await _processBatch({
connection,
numMessagesPerBatch,
upgradeMessageSchema,
maxVersion,
BackboneMessage,
saveMessage,
});
return batch;
};
// Private API
const _getConnection = async ({ databaseName, minDatabaseVersion }) => {
if (!isString(databaseName)) {
throw new TypeError("'databaseName' must be a string");
}
if (!isNumber(minDatabaseVersion)) {
throw new TypeError("'minDatabaseVersion' must be a number");
}
const connection = await database.open(databaseName);
const databaseVersion = connection.version;
const isValidDatabaseVersion = databaseVersion >= minDatabaseVersion;
if (!isValidDatabaseVersion) {
throw new Error(
`Expected database version (${databaseVersion})` +
` to be at least ${minDatabaseVersion}`
);
}
return connection;
};
const _processBatch = async ({
connection,
numMessagesPerBatch,
upgradeMessageSchema,
maxVersion,
BackboneMessage,
saveMessage,
} = {}) => {
if (!isObject(connection)) {
throw new TypeError('_processBatch: connection must be an object');
}
if (!isFunction(upgradeMessageSchema)) {
throw new TypeError('_processBatch: upgradeMessageSchema is required');
}
if (!isNumber(numMessagesPerBatch)) {
throw new TypeError('_processBatch: numMessagesPerBatch is required');
}
if (!isNumber(maxVersion)) {
throw new TypeError('_processBatch: maxVersion is required');
}
if (!isFunction(BackboneMessage)) {
throw new TypeError('_processBatch: BackboneMessage is required');
}
if (!isFunction(saveMessage)) {
throw new TypeError('_processBatch: saveMessage is required');
}
const isAttachmentMigrationComplete = await settings.isAttachmentMigrationComplete(
connection
);
if (isAttachmentMigrationComplete) {
return {
done: true,
};
}
const lastProcessedIndex = await settings.getAttachmentMigrationLastProcessedIndex(
connection
);
const fetchUnprocessedMessagesStartTime = Date.now();
let unprocessedMessages;
try {
unprocessedMessages = await _dangerouslyFetchMessagesRequiringSchemaUpgradeWithoutIndex(
{
connection,
count: numMessagesPerBatch,
lastIndex: lastProcessedIndex,
}
);
} catch (error) {
window.log.error(
'_processBatch error:',
error && error.stack ? error.stack : error
);
await settings.markAttachmentMigrationComplete(connection);
await settings.deleteAttachmentMigrationLastProcessedIndex(connection);
return {
done: true,
};
}
const fetchDuration = Date.now() - fetchUnprocessedMessagesStartTime;
const upgradeStartTime = Date.now();
const upgradedMessages = await Promise.all(
unprocessedMessages.map(message =>
upgradeMessageSchema(message, { maxVersion })
)
);
const upgradeDuration = Date.now() - upgradeStartTime;
const saveMessagesStartTime = Date.now();
const transaction = connection.transaction(MESSAGES_STORE_NAME, 'readwrite');
const transactionCompletion = database.completeTransaction(transaction);
await Promise.all(
upgradedMessages.map(message =>
saveMessage(message, { Message: BackboneMessage })
)
);
await transactionCompletion;
const saveDuration = Date.now() - saveMessagesStartTime;
const numMessagesProcessed = upgradedMessages.length;
const done = numMessagesProcessed < numMessagesPerBatch;
const lastMessage = last(upgradedMessages);
const newLastProcessedIndex = lastMessage ? lastMessage.id : null;
if (!done) {
await settings.setAttachmentMigrationLastProcessedIndex(
connection,
newLastProcessedIndex
);
} else {
await settings.markAttachmentMigrationComplete(connection);
await settings.deleteAttachmentMigrationLastProcessedIndex(connection);
}
const batchTotalDuration = Date.now() - fetchUnprocessedMessagesStartTime;
return {
batchTotalDuration,
done,
fetchDuration,
lastProcessedIndex,
newLastProcessedIndex,
numMessagesProcessed,
saveDuration,
targetSchemaVersion: Message.CURRENT_SCHEMA_VERSION,
upgradeDuration,
};
};
// NOTE: Named dangerous because it is not as efficient as using our
// `messages` `schemaVersion` index:
const _dangerouslyFetchMessagesRequiringSchemaUpgradeWithoutIndex = ({
connection,
count,
lastIndex,
} = {}) => {
if (!isObject(connection)) {
throw new TypeError("'connection' is required");
}
if (!isNumber(count)) {
throw new TypeError("'count' is required");
}
if (lastIndex && !isString(lastIndex)) {
throw new TypeError("'lastIndex' must be a string");
}
const hasLastIndex = Boolean(lastIndex);
const transaction = connection.transaction(MESSAGES_STORE_NAME, 'readonly');
const messagesStore = transaction.objectStore(MESSAGES_STORE_NAME);
const excludeLowerBound = true;
const range = hasLastIndex
? IDBKeyRange.lowerBound(lastIndex, excludeLowerBound)
: undefined;
return new Promise((resolve, reject) => {
const items = [];
const request = messagesStore.openCursor(range);
request.onsuccess = event => {
const cursor = event.target.result;
const hasMoreData = Boolean(cursor);
if (!hasMoreData || items.length === count) {
resolve(items);
return;
}
const item = cursor.value;
items.push(item);
cursor.continue();
};
request.onerror = event => reject(event.target.error);
});
};
exports.getNumMessages = async ({ connection } = {}) => {
if (!isObject(connection)) {
throw new TypeError("'connection' is required");
}
const transaction = connection.transaction(MESSAGES_STORE_NAME, 'readonly');
const messagesStore = transaction.objectStore(MESSAGES_STORE_NAME);
const numTotalMessages = await database.getCount({ store: messagesStore });
await database.completeTransaction(transaction);
return numTotalMessages;
};
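Both removed callers of processNext (indexeddb.js above and the old background.js idle handler) drove it the same way; condensed here, with the batch size and the data-layer functions taken from those callers:

// Condensed driver loop from the removed callers: keep requesting batches
// until a short batch (fewer messages than requested) reports done.
const MessageDataMigrator = require('./messages_data_migrator');

async function upgradeAllMessages(): Promise<void> {
  const w = window as any;
  let done = false;
  while (!done) {
    // eslint-disable-next-line no-await-in-loop
    const batch = await MessageDataMigrator.processNext({
      BackboneMessage: w.models.Message.MessageModel,
      BackboneMessageCollection: w.models.Message.MessageCollection,
      numMessagesPerBatch: 10,
      upgradeMessageSchema: w.Signal.Migrations.upgradeMessageSchema,
      getMessagesNeedingUpgrade: w.Signal.Data.getMessagesNeedingUpgrade,
      saveMessage: w.Signal.Data.saveMessage,
    });
    done = batch.done;
  }
}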


@@ -1,409 +0,0 @@
/* global window, IDBKeyRange */
const { includes, isFunction, isString, last, map } = require('lodash');
const {
bulkAddSessions,
bulkAddIdentityKeys,
bulkAddPreKeys,
bulkAddSignedPreKeys,
bulkAddItems,
removeSessionById,
removeIdentityKeyById,
removePreKeyById,
removeSignedPreKeyById,
removeItemById,
saveMessages,
_removeMessages,
saveUnprocesseds,
removeUnprocessed,
saveConversations,
_removeConversations,
} = require('./data');
const {
getMessageExportLastIndex,
setMessageExportLastIndex,
getMessageExportCount,
setMessageExportCount,
getUnprocessedExportLastIndex,
setUnprocessedExportLastIndex,
} = require('./settings');
const { migrateConversation } = require('./types/conversation');
module.exports = {
migrateToSQL,
};
async function migrateToSQL({
db,
clearStores,
handleDOMException,
countCallback,
arrayBufferToString,
writeNewAttachmentData,
}) {
if (!db) {
throw new Error('Need db for IndexedDB connection!');
}
if (!isFunction(clearStores)) {
throw new Error('Need clearStores function!');
}
if (!isFunction(arrayBufferToString)) {
throw new Error('Need arrayBufferToString function!');
}
if (!isFunction(handleDOMException)) {
throw new Error('Need handleDOMException function!');
}
window.log.info('migrateToSQL: start');
let [lastIndex, doneSoFar] = await Promise.all([
getMessageExportLastIndex(db),
getMessageExportCount(db),
]);
let complete = false;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
save: saveMessages,
remove: _removeMessages,
storeName: 'messages',
handleDOMException,
lastIndex,
});
({ complete, lastIndex } = status);
// eslint-disable-next-line no-await-in-loop
await Promise.all([
setMessageExportCount(db, doneSoFar),
setMessageExportLastIndex(db, lastIndex),
]);
const { count } = status;
doneSoFar += count;
if (countCallback) {
countCallback(doneSoFar);
}
}
window.log.info('migrateToSQL: migrate of messages complete');
try {
await clearStores(['messages']);
} catch (error) {
window.log.warn('Failed to clear messages store');
}
lastIndex = await getUnprocessedExportLastIndex(db);
complete = false;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
save: async array => {
await Promise.all(
map(array, async item => {
// In the new database, we can't store ArrayBuffers, so we turn these two
// fields into strings like MessageReceiver now does before save.
// Need to set it to version two, since we're using Base64 strings now
// eslint-disable-next-line no-param-reassign
item.version = 2;
if (item.envelope) {
// eslint-disable-next-line no-param-reassign
item.envelope = await arrayBufferToString(item.envelope);
}
if (item.decrypted) {
// eslint-disable-next-line no-param-reassign
item.decrypted = await arrayBufferToString(item.decrypted);
}
})
);
await saveUnprocesseds(array);
},
remove: removeUnprocessed,
storeName: 'unprocessed',
handleDOMException,
lastIndex,
});
({ complete, lastIndex } = status);
// eslint-disable-next-line no-await-in-loop
await setUnprocessedExportLastIndex(db, lastIndex);
}
window.log.info('migrateToSQL: migrate of unprocessed complete');
try {
await clearStores(['unprocessed']);
} catch (error) {
window.log.warn('Failed to clear unprocessed store');
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: async array => {
const conversations = await Promise.all(
map(array, async conversation =>
migrateConversation(conversation, { writeNewAttachmentData })
)
);
saveConversations(conversations);
},
remove: _removeConversations,
storeName: 'conversations',
handleDOMException,
lastIndex,
// Because we're doing real-time moves to the filesystem, minimize parallelism
batchSize: 5,
});
({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of conversations complete');
try {
await clearStores(['conversations']);
} catch (error) {
window.log.warn('Failed to clear conversations store');
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddSessions,
remove: removeSessionById,
storeName: 'sessions',
handleDOMException,
lastIndex,
batchSize: 10,
});
({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of sessions complete');
try {
await clearStores(['sessions']);
} catch (error) {
window.log.warn('Failed to clear sessions store');
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddIdentityKeys,
remove: removeIdentityKeyById,
storeName: 'identityKeys',
handleDOMException,
lastIndex,
batchSize: 10,
});
({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of identityKeys complete');
try {
await clearStores(['identityKeys']);
} catch (error) {
window.log.warn('Failed to clear identityKeys store');
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddPreKeys,
remove: removePreKeyById,
storeName: 'preKeys',
handleDOMException,
lastIndex,
batchSize: 10,
});
({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of preKeys complete');
try {
await clearStores(['preKeys']);
} catch (error) {
window.log.warn('Failed to clear preKeys store');
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddSignedPreKeys,
remove: removeSignedPreKeyById,
storeName: 'signedPreKeys',
handleDOMException,
lastIndex,
batchSize: 10,
});
({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of signedPreKeys complete');
try {
await clearStores(['signedPreKeys']);
} catch (error) {
window.log.warn('Failed to clear signedPreKeys store');
}
complete = false;
lastIndex = null;
while (!complete) {
// eslint-disable-next-line no-await-in-loop
const status = await migrateStoreToSQLite({
db,
// eslint-disable-next-line no-loop-func
save: bulkAddItems,
remove: removeItemById,
storeName: 'items',
handleDOMException,
lastIndex,
batchSize: 10,
});
({ complete, lastIndex } = status);
}
window.log.info('migrateToSQL: migrate of items complete');
// Note: we don't clear the items store because it contains important metadata which,
// if this process fails, will be crucial to going through this process again.
window.log.info('migrateToSQL: complete');
}
async function migrateStoreToSQLite({
db,
save,
remove,
storeName,
handleDOMException,
lastIndex = null,
batchSize = 50,
}) {
if (!db) {
throw new Error('Need db for IndexedDB connection!');
}
if (!isFunction(save)) {
throw new Error('Need save function!');
}
if (!isFunction(remove)) {
throw new Error('Need remove function!');
}
if (!isString(storeName)) {
throw new Error('Need storeName!');
}
if (!isFunction(handleDOMException)) {
throw new Error('Need handleDOMException for error handling!');
}
if (!includes(db.objectStoreNames, storeName)) {
return {
complete: true,
count: 0,
};
}
const queryPromise = new Promise((resolve, reject) => {
const items = [];
const transaction = db.transaction(storeName, 'readonly');
transaction.onerror = () => {
handleDOMException(
'migrateToSQLite transaction error',
transaction.error,
reject
);
};
transaction.oncomplete = () => {};
const store = transaction.objectStore(storeName);
const excludeLowerBound = true;
const range = lastIndex
? IDBKeyRange.lowerBound(lastIndex, excludeLowerBound)
: undefined;
const request = store.openCursor(range);
request.onerror = () => {
handleDOMException(
'migrateToSQLite: request error',
request.error,
reject
);
};
request.onsuccess = event => {
const cursor = event.target.result;
if (!cursor || !cursor.value) {
return resolve({
complete: true,
items,
});
}
const item = cursor.value;
items.push(item);
if (items.length >= batchSize) {
return resolve({
complete: false,
items,
});
}
return cursor.continue();
};
});
const { items, complete } = await queryPromise;
if (items.length) {
// Because of the force save and some failed imports, we're going to delete before
// we attempt to insert.
const ids = items.map(item => item.id);
await remove(ids);
// We need to pass forceSave parameter, because these items already have an
// id key. Normally, this call would be interpreted as an update request.
await save(items, { forceSave: true });
}
const lastItem = last(items);
const id = lastItem ? lastItem.id : null;
return {
complete,
count: items.length,
lastIndex: id,
};
}
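Note: each store above is drained with the same resume loop around migrateStoreToSQLite. A minimal TypeScript sketch of that driver, shown once in isolation (names mirror the code above; nothing here is part of the diff):
// Sketch only: drain one IndexedDB store in batches. `migrate` is expected to
// behave like migrateStoreToSQLite above: it returns the last migrated key so
// the next call can resume strictly after it.
type BatchStatus = { complete: boolean; count: number; lastIndex?: string | null };
async function drainStore(
  migrate: (opts: { lastIndex: string | null; batchSize: number }) => Promise<BatchStatus>
): Promise<void> {
  let complete = false;
  let lastIndex: string | null = null;
  while (!complete) {
    // eslint-disable-next-line no-await-in-loop
    const status = await migrate({ lastIndex, batchSize: 10 });
    complete = status.complete;
    lastIndex = status.lastIndex ?? null;
  }
}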

View File

@ -1,17 +0,0 @@
exports.run = ({ transaction, logger }) => {
const messagesStore = transaction.objectStore('messages');
logger.info("Create message attachment metadata index: 'hasAttachments'");
messagesStore.createIndex(
'hasAttachments',
['conversationId', 'hasAttachments', 'received_at'],
{ unique: false }
);
['hasVisualMediaAttachments', 'hasFileAttachments'].forEach(name => {
logger.info(`Create message attachment metadata index: '${name}'`);
messagesStore.createIndex(name, ['conversationId', 'received_at', name], {
unique: false,
});
});
};
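For context, a hedged sketch of how a compound index such as 'hasAttachments' can be read back later (illustrative only, not part of the migration; it assumes the flag is stored as 1/0 so it can take part in an IndexedDB key):
// Hypothetical reader for the ['conversationId', 'hasAttachments', 'received_at']
// index created above: all messages with attachments in one conversation,
// ordered by received_at.
function getMessagesWithAttachments(
  db: IDBDatabase,
  conversationId: string
): Promise<any[]> {
  return new Promise((resolve, reject) => {
    const results: any[] = [];
    const tx = db.transaction('messages', 'readonly');
    tx.onerror = () => reject(tx.error);
    const index = tx.objectStore('messages').index('hasAttachments');
    const range = IDBKeyRange.bound(
      [conversationId, 1, 0],
      [conversationId, 1, Number.MAX_SAFE_INTEGER]
    );
    const request = index.openCursor(range);
    request.onerror = () => reject(request.error);
    request.onsuccess = () => {
      const cursor = request.result;
      if (!cursor) {
        resolve(results);
        return;
      }
      results.push(cursor.value);
      cursor.continue();
    };
  });
}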

View File

@ -1,35 +0,0 @@
/* global window, Whisper */
const Migrations = require('./migrations');
exports.getPlaceholderMigrations = () => {
const version = Migrations.getLatestVersion();
return [
{
version,
migrate() {
throw new Error(
'Unexpected invocation of placeholder migration!' +
'\n\nMigrations must explicitly be run upon application startup instead' +
' of implicitly via Backbone IndexedDB adapter at any time.'
);
},
},
];
};
exports.getCurrentVersion = () =>
new Promise((resolve, reject) => {
const request = window.indexedDB.open(Whisper.Database.id);
request.onerror = reject;
request.onupgradeneeded = reject;
request.onsuccess = () => {
const db = request.result;
const { version } = db;
return resolve(version);
};
});

View File

@ -1,245 +0,0 @@
/* global window */
const { isString, last } = require('lodash');
const { runMigrations } = require('./run_migrations');
const Migration18 = require('./18');
// IMPORTANT: The migrations below are run on a database that may be very large
// due to attachments being directly stored inside the database. Please avoid
// any expensive operations, e.g. modifying all messages / attachments, etc., as
// it may cause out-of-memory errors for users with long histories:
// https://github.com/signalapp/Signal-Desktop/issues/2163
const migrations = [
{
version: '12.0',
migrate(transaction, next) {
window.log.info('Migration 12');
window.log.info('creating object stores');
const messages = transaction.db.createObjectStore('messages');
messages.createIndex('conversation', ['conversationId', 'received_at'], {
unique: false,
});
messages.createIndex('receipt', 'sent_at', { unique: false });
messages.createIndex('unread', ['conversationId', 'unread'], {
unique: false,
});
messages.createIndex('expires_at', 'expires_at', { unique: false });
const conversations = transaction.db.createObjectStore('conversations');
conversations.createIndex('inbox', 'active_at', { unique: false });
conversations.createIndex('group', 'members', {
unique: false,
multiEntry: true,
});
conversations.createIndex('type', 'type', {
unique: false,
});
conversations.createIndex('search', 'tokens', {
unique: false,
multiEntry: true,
});
transaction.db.createObjectStore('groups');
transaction.db.createObjectStore('sessions');
transaction.db.createObjectStore('identityKeys');
const preKeys = transaction.db.createObjectStore('preKeys', {
keyPath: 'id',
});
preKeys.createIndex('recipient', 'recipient', { unique: true });
transaction.db.createObjectStore('signedPreKeys');
transaction.db.createObjectStore('items');
const contactPreKeys = transaction.db.createObjectStore(
'contactPreKeys',
{ keyPath: 'id', autoIncrement: true }
);
contactPreKeys.createIndex('identityKeyString', 'identityKeyString', {
unique: false,
});
contactPreKeys.createIndex('keyId', 'keyId', { unique: false });
const contactSignedPreKeys = transaction.db.createObjectStore(
'contactSignedPreKeys',
{ keyPath: 'id', autoIncrement: true }
);
contactSignedPreKeys.createIndex(
'identityKeyString',
'identityKeyString',
{ unique: false }
);
contactSignedPreKeys.createIndex('keyId', 'keyId', { unique: false });
window.log.info('creating debug log');
transaction.db.createObjectStore('debug');
next();
},
},
{
version: '13.0',
migrate(transaction, next) {
window.log.info('Migration 13');
window.log.info('Adding fields to identity keys');
const identityKeys = transaction.objectStore('identityKeys');
const request = identityKeys.openCursor();
const promises = [];
request.onsuccess = event => {
const cursor = event.target.result;
if (cursor) {
const attributes = cursor.value;
attributes.timestamp = 0;
attributes.firstUse = false;
attributes.nonblockingApproval = false;
attributes.verified = 0;
promises.push(
new Promise((resolve, reject) => {
const putRequest = identityKeys.put(attributes, attributes.id);
putRequest.onsuccess = resolve;
putRequest.onerror = error => {
window.log.error(error && error.stack ? error.stack : error);
reject(error);
};
})
);
cursor.continue();
} else {
// no more results
// eslint-disable-next-line more/no-then
Promise.all(promises).then(() => {
next();
});
}
};
request.onerror = event => {
window.log.error(event);
};
},
},
{
version: '14.0',
migrate(transaction, next) {
window.log.info('Migration 14');
window.log.info('Adding unprocessed message store');
const unprocessed = transaction.db.createObjectStore('unprocessed');
unprocessed.createIndex('received', 'timestamp', { unique: false });
next();
},
},
{
version: '15.0',
migrate(transaction, next) {
window.log.info('Migration 15');
window.log.info('Adding messages index for de-duplication');
const messages = transaction.objectStore('messages');
messages.createIndex('unique', ['source', 'sourceDevice', 'sent_at'], {
unique: true,
});
next();
},
},
{
version: '16.0',
migrate(transaction, next) {
window.log.info('Migration 16');
window.log.info('Dropping log table, since we now log to disk');
transaction.db.deleteObjectStore('debug');
next();
},
},
{
version: 17,
async migrate(transaction, next) {
window.log.info('Migration 17');
const start = Date.now();
const messagesStore = transaction.objectStore('messages');
window.log.info(
'Create index from attachment schema version to attachment'
);
messagesStore.createIndex('schemaVersion', 'schemaVersion', {
unique: false,
});
const duration = Date.now() - start;
window.log.info(
'Complete migration to database version 17',
`Duration: ${duration}ms`
);
next();
},
},
{
version: 18,
migrate(transaction, next) {
window.log.info('Migration 18');
const start = Date.now();
Migration18.run({ transaction, logger: window.log });
const duration = Date.now() - start;
window.log.info(
'Complete migration to database version 18',
`Duration: ${duration}ms`
);
next();
},
},
{
version: 19,
migrate(transaction, next) {
window.log.info('Migration 19');
// Empty because we don't want to cause incompatibility with beta users who have
// already run migration 19 when it was object store removal.
next();
},
},
{
version: 20,
migrate(transaction, next) {
window.log.info('Migration 20');
// Empty because we don't want to cause incompatibility with users who have already
// run migration 20 when it was object store removal.
next();
},
},
];
const database = {
id: 'loki-messenger',
nolog: true,
migrations,
};
exports.run = ({ Backbone, databaseName, logger } = {}) =>
runMigrations({
Backbone,
logger,
database: Object.assign(
{},
database,
isString(databaseName) ? { id: databaseName } : {}
),
});
exports.getDatabase = () => ({
name: database.id,
version: exports.getLatestVersion(),
});
exports.getLatestVersion = () => {
const lastMigration = last(migrations);
if (!lastMigration) {
return null;
}
return lastMigration.version;
};
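A reminder of the mechanics, stated as an assumption rather than code from this diff: IndexedDB only runs these migrate callbacks inside onupgradeneeded, which fires when the database is opened with a version higher than the stored one. A rough sketch of applying a list shaped like the one above, chaining each migration through its next callback:
// Sketch only (hypothetical helper, not the Backbone IndexedDB adapter the
// app actually uses): run the pending migrations in order, each one calling
// `next` when it is done.
function applyPending(
  tx: IDBTransaction,
  pending: Array<{ migrate: (tx: IDBTransaction, next: () => void) => void }>,
  done: () => void
): void {
  if (pending.length === 0) {
    done();
    return;
  }
  const [first, ...rest] = pending;
  first.migrate(tx, () => applyPending(tx, rest, done));
}
function openWithMigrations(
  name: string,
  migrations: Array<{
    version: string | number;
    migrate: (tx: IDBTransaction, next: () => void) => void;
  }>
): Promise<IDBDatabase> {
  // Versions like '12.0' and 17 both normalize to integers for indexedDB.open.
  const latest = Math.trunc(Number(migrations[migrations.length - 1].version));
  return new Promise((resolve, reject) => {
    const request = indexedDB.open(name, latest);
    request.onerror = () => reject(request.error);
    request.onsuccess = () => resolve(request.result);
    request.onupgradeneeded = event => {
      // The versionchange transaction is available on the open request.
      const tx = request.transaction as IDBTransaction;
      const pending = migrations.filter(
        m => Math.trunc(Number(m.version)) > event.oldVersion
      );
      applyPending(tx, pending, () => undefined);
    };
  });
}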

View File

@ -1,79 +0,0 @@
/* eslint-env browser */
const { head, isFunction, isObject, isString, last } = require('lodash');
const db = require('../database');
const { deferredToPromise } = require('../deferred_to_promise');
const closeDatabaseConnection = ({ Backbone } = {}) =>
deferredToPromise(Backbone.sync('closeall'));
exports.runMigrations = async ({ Backbone, database, logger } = {}) => {
if (
!isObject(Backbone) ||
!isObject(Backbone.Collection) ||
!isFunction(Backbone.Collection.extend)
) {
throw new TypeError('runMigrations: Backbone is required');
}
if (
!isObject(database) ||
!isString(database.id) ||
!Array.isArray(database.migrations)
) {
throw new TypeError('runMigrations: database is required');
}
if (!isObject(logger)) {
throw new TypeError('runMigrations: logger is required');
}
const {
firstVersion: firstMigrationVersion,
lastVersion: lastMigrationVersion,
} = getMigrationVersions(database);
const databaseVersion = await db.getVersion(database.id);
const isAlreadyUpgraded = databaseVersion >= lastMigrationVersion;
logger.info('Database status', {
firstMigrationVersion,
lastMigrationVersion,
databaseVersion,
isAlreadyUpgraded,
});
if (isAlreadyUpgraded) {
return;
}
const migrationCollection = new (Backbone.Collection.extend({
database,
storeName: 'items',
}))();
// Note: this legacy migration technique is required to bring old clients with
// data in IndexedDB forward into the new world of SQLCipher only.
await deferredToPromise(migrationCollection.fetch({ limit: 1 }));
logger.info('Close database connection');
await closeDatabaseConnection({ Backbone });
};
const getMigrationVersions = database => {
if (!isObject(database) || !Array.isArray(database.migrations)) {
throw new TypeError("'database' is required");
}
const firstMigration = head(database.migrations);
const lastMigration = last(database.migrations);
const firstVersion = firstMigration
? parseInt(firstMigration.version, 10)
: null;
const lastVersion = lastMigration
? parseInt(lastMigration.version, 10)
: null;
return { firstVersion, lastVersion };
};

View File

@ -1,15 +1,13 @@
// The idea with this file is to make it webpackable for the style guide
const Crypto = require('./crypto');
const Data = require('./data');
const Data = require('../../ts/data/data');
const Database = require('./database');
const Emoji = require('../../ts/util/emoji');
const IndexedDB = require('./indexeddb');
const Notifications = require('../../ts/notifications');
const OS = require('../../ts/OS');
const Settings = require('./settings');
const Util = require('../../ts/util');
const { migrateToSQL } = require('./migrate_to_sql');
const LinkPreviews = require('./link_previews');
const AttachmentDownloads = require('./attachment_downloads');
const { Message } = require('../../ts/components/conversation/Message');
@ -57,13 +55,6 @@ const {
RemoveModeratorsDialog,
} = require('../../ts/components/conversation/ModeratorsRemoveDialog');
// Migrations
const {
getPlaceholderMigrations,
getCurrentVersion,
} = require('./migrations/get_placeholder_migrations');
const { run } = require('./migrations/migrations');
// Types
const AttachmentType = require('./types/attachment');
const VisualAttachment = require('./types/visual_attachment');
@ -77,10 +68,6 @@ const SettingsType = require('../../ts/types/Settings');
// Views
const Initialization = require('./views/initialization');
// Workflow
const { IdleDetector } = require('./idle_detector');
const MessageDataMigrator = require('./messages_data_migrator');
function initializeMigrations({
userDataPath,
Attachments,
@ -123,14 +110,11 @@ function initializeMigrations({
deleteOnDisk,
}),
getAbsoluteAttachmentPath,
getPlaceholderMigrations,
getCurrentVersion,
loadAttachmentData,
loadMessage: MessageType.createAttachmentLoader(loadAttachmentData),
loadPreviewData,
loadQuoteData,
readAttachmentData,
run,
processNewAttachment: attachment =>
MessageType.processNewAttachment(attachment, {
writeNewAttachmentData,
@ -213,11 +197,6 @@ exports.setup = (options = {}) => {
Initialization,
};
const Workflow = {
IdleDetector,
MessageDataMigrator,
};
return {
AttachmentDownloads,
Components,
@ -225,9 +204,7 @@ exports.setup = (options = {}) => {
Data,
Database,
Emoji,
IndexedDB,
LinkPreviews,
migrateToSQL,
Migrations,
Notifications,
OS,
@ -235,6 +212,5 @@ exports.setup = (options = {}) => {
Types,
Util,
Views,
Workflow,
};
};

View File

@ -46,10 +46,7 @@
return message;
}
const groups = await window.Signal.Data.getAllGroupsInvolvingId(reader, {
ConversationCollection:
window.models.Conversation.ConversationCollection,
});
const groups = await window.Signal.Data.getAllGroupsInvolvingId(reader);
const ids = groups.pluck('id');
ids.push(reader);

View File

@ -1,10 +1,7 @@
/*
global
dcodeIO,
Backbone,
_,
textsecure,
stringObject,
BlockedNumberController
*/
@ -14,342 +11,12 @@
(function() {
'use strict';
const Direction = {
SENDING: 1,
RECEIVING: 2,
};
const StaticByteBufferProto = new dcodeIO.ByteBuffer().__proto__;
const StaticArrayBufferProto = new ArrayBuffer().__proto__;
const StaticUint8ArrayProto = new Uint8Array().__proto__;
function isStringable(thing) {
return (
thing === Object(thing) &&
(thing.__proto__ === StaticArrayBufferProto ||
thing.__proto__ === StaticUint8ArrayProto ||
thing.__proto__ === StaticByteBufferProto)
);
}
function convertToArrayBuffer(thing) {
if (thing === undefined) {
return undefined;
}
if (thing === Object(thing)) {
if (thing.__proto__ === StaticArrayBufferProto) {
return thing;
}
// TODO: Several more cases here...
}
if (thing instanceof Array) {
// Assuming Uint16Array from curve25519
const res = new ArrayBuffer(thing.length * 2);
const uint = new Uint16Array(res);
for (let i = 0; i < thing.length; i += 1) {
uint[i] = thing[i];
}
return res;
}
let str;
if (isStringable(thing)) {
str = stringObject(thing);
} else if (typeof thing === 'string') {
str = thing;
} else {
throw new Error(
`Tried to convert a non-stringable thing of type ${typeof thing} to an array buffer`
);
}
const res = new ArrayBuffer(str.length);
const uint = new Uint8Array(res);
for (let i = 0; i < str.length; i += 1) {
uint[i] = str.charCodeAt(i);
}
return res;
}
function equalArrayBuffers(ab1, ab2) {
if (!(ab1 instanceof ArrayBuffer && ab2 instanceof ArrayBuffer)) {
return false;
}
if (ab1.byteLength !== ab2.byteLength) {
return false;
}
let result = 0;
const ta1 = new Uint8Array(ab1);
const ta2 = new Uint8Array(ab2);
for (let i = 0; i < ab1.byteLength; i += 1) {
// eslint-disable-next-line no-bitwise
result |= ta1[i] ^ ta2[i];
}
return result === 0;
}
const IdentityRecord = Backbone.Model.extend({
storeName: 'identityKeys',
validAttributes: [
'id',
'publicKey',
'firstUse',
'timestamp',
'nonblockingApproval',
],
validate(attrs) {
const attributeNames = _.keys(attrs);
const { validAttributes } = this;
const allValid = _.all(attributeNames, attributeName =>
_.contains(validAttributes, attributeName)
);
if (!allValid) {
return new Error('Invalid identity key attribute names');
}
const allPresent = _.all(validAttributes, attributeName =>
_.contains(attributeNames, attributeName)
);
if (!allPresent) {
return new Error('Missing identity key attributes');
}
if (typeof attrs.id !== 'string') {
return new Error('Invalid identity key id');
}
if (!(attrs.publicKey instanceof ArrayBuffer)) {
return new Error('Invalid identity key publicKey');
}
if (typeof attrs.firstUse !== 'boolean') {
return new Error('Invalid identity key firstUse');
}
if (typeof attrs.timestamp !== 'number' || !(attrs.timestamp >= 0)) {
return new Error('Invalid identity key timestamp');
}
if (typeof attrs.nonblockingApproval !== 'boolean') {
return new Error('Invalid identity key nonblockingApproval');
}
return null;
},
});
function SignalProtocolStore() {}
async function _hydrateCache(object, field, items, idField) {
const cache = Object.create(null);
for (let i = 0, max = items.length; i < max; i += 1) {
const item = items[i];
const id = item[idField];
cache[id] = item;
}
window.log.info(`SignalProtocolStore: Finished caching ${field} data`);
// eslint-disable-next-line no-param-reassign
object[field] = cache;
}
SignalProtocolStore.prototype = {
constructor: SignalProtocolStore,
async hydrateCaches() {
await Promise.all([
_hydrateCache(
this,
'identityKeys',
await window.Signal.Data.getAllIdentityKeys(),
'id'
),
_hydrateCache(
this,
'sessions',
await window.Signal.Data.getAllSessions(),
'id'
),
_hydrateCache(
this,
'preKeys',
await window.Signal.Data.getAllPreKeys(),
'id'
),
_hydrateCache(
this,
'signedPreKeys',
await window.Signal.Data.getAllSignedPreKeys(),
'id'
),
]);
},
async getIdentityKeyPair() {
const item = await window.Signal.Data.getItemById('identityKey');
if (item) {
return item.value;
}
window.log.error('Could not load identityKey from SignalData');
return undefined;
},
// PreKeys
async clearPreKeyStore() {
this.preKeys = Object.create(null);
await window.Signal.Data.removeAllPreKeys();
},
// Signed PreKeys
async clearSignedPreKeysStore() {
this.signedPreKeys = Object.create(null);
await window.Signal.Data.removeAllSignedPreKeys();
},
// Sessions
async clearSessionStore() {
this.sessions = Object.create(null);
window.Signal.Data.removeAllSessions();
},
// Identity Keys
async loadIdentityKey(identifier) {
if (identifier === null || identifier === undefined) {
throw new Error('Tried to get identity key for undefined/null key');
}
const number = textsecure.utils.unencodeNumber(identifier)[0];
const identityRecord = this.identityKeys[number];
if (identityRecord) {
return identityRecord.publicKey;
}
return undefined;
},
async _saveIdentityKey(data) {
const { id } = data;
this.identityKeys[id] = data;
await window.Signal.Data.createOrUpdateIdentityKey(data);
},
async saveIdentity(identifier, publicKey, nonblockingApproval) {
if (identifier === null || identifier === undefined) {
throw new Error('Tried to put identity key for undefined/null key');
}
if (!(publicKey instanceof ArrayBuffer)) {
// eslint-disable-next-line no-param-reassign
publicKey = convertToArrayBuffer(publicKey);
}
if (typeof nonblockingApproval !== 'boolean') {
// eslint-disable-next-line no-param-reassign
nonblockingApproval = false;
}
const number = textsecure.utils.unencodeNumber(identifier)[0];
const identityRecord = this.identityKeys[number];
if (!identityRecord || !identityRecord.publicKey) {
// Lookup failed, or the current key was removed, so save this one.
window.log.info('Saving new identity...');
await this._saveIdentityKey({
id: number,
publicKey,
firstUse: true,
timestamp: Date.now(),
nonblockingApproval,
});
return false;
}
const oldpublicKey = identityRecord.publicKey;
if (!equalArrayBuffers(oldpublicKey, publicKey)) {
window.log.info('Replacing existing identity...');
await this._saveIdentityKey({
id: number,
publicKey,
firstUse: false,
timestamp: Date.now(),
nonblockingApproval,
});
return true;
}
return false;
},
async saveIdentityWithAttributes(identifier, attributes) {
if (identifier === null || identifier === undefined) {
throw new Error('Tried to put identity key for undefined/null key');
}
const number = textsecure.utils.unencodeNumber(identifier)[0];
const identityRecord = this.identityKeys[number];
const updates = {
id: number,
...identityRecord,
...attributes,
};
const model = new IdentityRecord(updates);
if (model.isValid()) {
await this._saveIdentityKey(updates);
} else {
throw model.validationError;
}
},
async setApproval(identifier, nonblockingApproval) {
if (identifier === null || identifier === undefined) {
throw new Error('Tried to set approval for undefined/null identifier');
}
if (typeof nonblockingApproval !== 'boolean') {
throw new Error('Invalid approval status');
}
const number = textsecure.utils.unencodeNumber(identifier)[0];
const identityRecord = this.identityKeys[number];
if (!identityRecord) {
throw new Error(`No identity record for ${number}`);
}
identityRecord.nonblockingApproval = nonblockingApproval;
await this._saveIdentityKey(identityRecord);
},
async removeIdentityKey(number) {
delete this.identityKeys[number];
await window.Signal.Data.removeIdentityKeyById(number);
},
// Not yet processed messages - for resiliency
getUnprocessedCount() {
return window.Signal.Data.getUnprocessedCount();
},
getAllUnprocessed() {
return window.Signal.Data.getAllUnprocessed();
},
getUnprocessedById(id) {
return window.Signal.Data.getUnprocessedById(id);
},
addUnprocessed(data) {
// We need to pass forceSave because the data has an id already, which will cause
// an update instead of an insert.
return window.Signal.Data.saveUnprocessed(data, {
forceSave: true,
});
},
updateUnprocessedAttempts(id, attempts) {
return window.Signal.Data.updateUnprocessedAttempts(id, attempts);
},
updateUnprocessedWithData(id, data) {
return window.Signal.Data.updateUnprocessedWithData(id, data);
},
removeUnprocessed(id) {
return window.Signal.Data.removeUnprocessed(id);
},
removeAllUnprocessed() {
return window.Signal.Data.removeAllUnprocessed();
},
async removeAllData() {
await window.Signal.Data.removeAll();
await this.hydrateCaches();
window.storage.reset();
await window.storage.fetch();
@ -359,16 +26,8 @@
await window.getConversationController().load();
await BlockedNumberController.load();
},
async removeAllConfiguration() {
await window.Signal.Data.removeAllConfiguration();
await this.hydrateCaches();
window.storage.reset();
await window.storage.fetch();
},
};
_.extend(SignalProtocolStore.prototype, Backbone.Events);
window.SignalProtocolStore = SignalProtocolStore;
window.SignalProtocolStore.prototype.Direction = Direction;
})();
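The pattern this commit uses to replace the removed store methods is visible in the hunks that follow: call window.Signal.Data directly. A tiny hedged sketch of the identity-key lookup as it reads after the change (types loosened for illustration):
// Sketch of the direct lookup that replaces textsecure.storage.protocol
// .getIdentityKeyPair() in the hunks below.
async function getOurIdentityKeyPair(): Promise<any | undefined> {
  const item = await (window as any).Signal.Data.getItemById('identityKey');
  return (item && item.value) || undefined;
}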

View File

@ -1,7 +1,6 @@
/* global
window,
libsignal,
textsecure,
StringView,
Multibase,
TextEncoder,
@ -147,7 +146,8 @@
const serverPubKey = new Uint8Array(
dcodeIO.ByteBuffer.fromBase64(serverPubKey64).toArrayBuffer()
);
const keyPair = await textsecure.storage.protocol.getIdentityKeyPair();
const item = await window.Signal.Data.getItemById('identityKey');
const keyPair = (item && item.value) || undefined;
if (!keyPair) {
throw new Error('Failed to get keypair for token decryption');
}

View File

@ -82,7 +82,6 @@
return this.pending;
},
async createAccount(identityKeyPair, userAgent, readReceipts) {
const signalingKey = libsignal.crypto.getRandomBytes(32 + 20);
let password = btoa(getString(libsignal.crypto.getRandomBytes(16)));
password = password.substring(0, password.length - 2);
@ -102,16 +101,6 @@
// update our own identity key, which may have changed
// if we're relinking after a reinstall on the master device
const pubKeyString = StringView.arrayBufferToHex(identityKeyPair.pubKey);
await textsecure.storage.protocol.saveIdentityWithAttributes(
pubKeyString,
{
id: pubKeyString,
publicKey: identityKeyPair.pubKey,
firstUse: true,
timestamp: Date.now(),
nonblockingApproval: true,
}
);
await textsecure.storage.put('identityKey', identityKeyPair);
await textsecure.storage.put('password', password);
@ -130,15 +119,15 @@
await textsecure.storage.user.setNumberAndDeviceId(pubKeyString, 1);
},
async clearSessionsAndPreKeys() {
const store = textsecure.storage.protocol;
window.log.info('clearing all sessions');
await Promise.all([store.clearSessionStore()]);
// During secondary device registration we need to keep our prekeys sent
// to other pubkeys
await Promise.all([
store.clearPreKeyStore(),
store.clearSignedPreKeysStore(),
window.Signal.Data.removeAllPreKeys(),
window.Signal.Data.removeAllSignedPreKeys(),
window.Signal.Data.removeAllContactPreKeys(),
window.Signal.Data.removeAllContactSignedPreKeys(),
window.Signal.Data.removeAllSessions(),
]);
},
async generateMnemonic(language = 'english') {

View File

@ -1,4 +1,4 @@
/* global window, textsecure */
/* global window */
// eslint-disable-next-line func-names
(function() {
@ -10,31 +10,30 @@
window.textsecure.storage.unprocessed = {
getCount() {
return textsecure.storage.protocol.getUnprocessedCount();
return window.Signal.Data.getUnprocessedCount();
},
getAll() {
return textsecure.storage.protocol.getAllUnprocessed();
return window.Signal.Data.getAllUnprocessed();
},
get(id) {
return textsecure.storage.protocol.getUnprocessedById(id);
return window.Signal.Data.getUnprocessedById(id);
},
add(data) {
return textsecure.storage.protocol.addUnprocessed(data);
return window.Signal.Data.saveUnprocessed(data, {
forceSave: true,
});
},
updateAttempts(id, attempts) {
return textsecure.storage.protocol.updateUnprocessedAttempts(
id,
attempts
);
return window.Signal.Data.updateUnprocessedAttempts(id, attempts);
},
addDecryptedData(id, data) {
return textsecure.storage.protocol.updateUnprocessedWithData(id, data);
return window.Signal.Data.updateUnprocessedWithData(id, data);
},
remove(id) {
return textsecure.storage.protocol.removeUnprocessed(id);
return window.Signal.Data.removeUnprocessed(id);
},
removeAll() {
return textsecure.storage.protocol.removeAllUnprocessed();
return window.Signal.Data.removeAllUnprocessed();
},
};
})();

main.js (14 changed lines)
View File

@ -756,20 +756,6 @@ async function showMainWindow(sqlKey, passwordAttempt = false) {
appStartInitialSpellcheckSetting = await getSpellCheckSetting();
await sqlChannels.initialize();
try {
const IDB_KEY = 'indexeddb-delete-needed';
const item = await sql.getItemById(IDB_KEY);
if (item && item.value) {
await sql.removeIndexedDBFiles();
await sql.removeItemById(IDB_KEY);
}
} catch (error) {
console.log(
'(ready event handler) error deleting IndexedDB:',
error && error.stack ? error.stack : error
);
}
async function cleanupOrphanedAttachments() {
const allAttachments = await attachments.getAllAttachments(userDataPath);
const orphanedAttachments = await sql.removeKnownAttachments(

View File

@ -86,7 +86,7 @@ window.isBeforeVersion = (toCheck, baseVersion) => {
};
// eslint-disable-next-line func-names
window.CONSTANTS = new (function () {
window.CONSTANTS = new (function() {
this.MAX_GROUP_NAME_LENGTH = 64;
this.DEFAULT_PUBLIC_CHAT_URL = appConfig.get('defaultPublicChatServer');
this.MAX_LINKED_DEVICES = 1;
@ -377,7 +377,7 @@ window.callWorker = (fnName, ...args) => utilWorker.callWorker(fnName, ...args);
// Linux seems to periodically let the event loop stop, so this is a global workaround
setInterval(() => {
window.nodeSetImmediate(() => { });
window.nodeSetImmediate(() => {});
}, 1000);
const { autoOrientImage } = require('./js/modules/auto_orient_image');
@ -417,9 +417,11 @@ window.moment.locale(localeForMoment);
window.OnionAPI = OnionAPI;
window.libsession = require('./ts/session');
window.models = require('./ts/models');
window.Signal = window.Signal || {};
window.Signal.Data = require('./ts/data/data');
window.getMessageController = () =>
window.libsession.Messages.MessageController.getInstance();
@ -446,19 +448,20 @@ window.DataMessageReceiver = require('./ts/receiver/dataMessage');
window.NewSnodeAPI = require('./ts/session/snode_api/serviceNodeAPI');
window.SnodePool = require('./ts/session/snode_api/snodePool');
const { SwarmPolling } = require('./ts/session/snode_api/swarmPolling');
const { SwarmPollingStub } = require('./ts/session/snode_api/swarmPollingStub');
if (process.env.USE_STUBBED_NETWORK) {
const {
SwarmPollingStub,
} = require('./ts/session/snode_api/swarmPollingStub');
window.SwarmPolling = new SwarmPollingStub();
} else {
const { SwarmPolling } = require('./ts/session/snode_api/swarmPolling');
window.SwarmPolling = new SwarmPolling();
}
// eslint-disable-next-line no-extend-native,func-names
Promise.prototype.ignore = function () {
Promise.prototype.ignore = function() {
// eslint-disable-next-line more/no-then
this.then(() => { });
this.then(() => {});
};
if (
@ -474,7 +477,6 @@ if (
tmp: require('tmp'),
path: require('path'),
basePath: __dirname,
attachmentsPath: window.Signal.Migrations.attachmentsPath,
isWindows,
};
/* eslint-enable global-require, import/no-extraneous-dependencies */

View File

@ -552,12 +552,7 @@ describe('Backup', () => {
});
console.log('Backup test: Check conversations');
const conversationCollection = await window.Signal.Data.getAllConversations(
{
ConversationCollection:
window.models.Conversation.ConversationCollection,
}
);
const conversationCollection = await window.Signal.Data.getAllConversations();
assert.strictEqual(conversationCollection.length, CONVERSATION_COUNT);
// We need to omit any custom fields we have added

View File

@ -2,7 +2,6 @@ import React from 'react';
import { connect } from 'react-redux';
import { SessionIconButton, SessionIconSize, SessionIconType } from './icon';
import { Avatar } from '../Avatar';
import { removeItemById } from '../../../js/modules/data';
import { darkTheme, lightTheme } from '../../state/ducks/SessionTheme';
import { SessionToastContainer } from './SessionToastContainer';
import { mapDispatchToProps } from '../../state/actions';
@ -16,6 +15,7 @@ import { getOurNumber } from '../../state/selectors/user';
import { UserUtils } from '../../session/utils';
import { syncConfigurationIfNeeded } from '../../session/utils/syncUtils';
import { DAYS } from '../../session/utils/Number';
import { removeItemById } from '../../data/data';
// tslint:disable-next-line: no-import-side-effect no-submodule-imports
export enum SectionType {

View File

@ -14,7 +14,7 @@ import { StringUtils, ToastUtils } from '../../session/utils';
import { lightTheme } from '../../state/ducks/SessionTheme';
import { ConversationController } from '../../session/conversations';
import { PasswordUtil } from '../../util';
import { removeAll } from '../../../js/modules/data';
import { removeAll } from '../../data/data';
export const MAX_USERNAME_LENGTH = 20;

View File

@ -1,7 +1,7 @@
import React from 'react';
import { Provider } from 'react-redux';
import { bindActionCreators } from 'redux';
import { getMessageById } from '../../../js/modules/data';
import { getMessageById } from '../../data/data';
import { MessageModel } from '../../models/message';
import { getMessageQueue } from '../../session';
import { ConversationController } from '../../session/conversations';
@ -125,9 +125,7 @@ export class SessionInboxView extends React.Component<Props, State> {
if (!msg || !msg.message) {
// otherwise, look for it in the database
// nobody is listening to this freshly fetched message's .trigger calls
const dbMessage = await getMessageById(m.identifier, {
Message: MessageModel,
});
const dbMessage = await getMessageById(m.identifier);
if (!dbMessage) {
return null;

View File

@ -4,11 +4,9 @@ import { SessionModal } from './SessionModal';
import { SessionButton, SessionButtonColor } from './SessionButton';
import { missingCaseError, PasswordUtil } from '../../util/';
import { ToastUtils } from '../../session/utils';
import { toast } from 'react-toastify';
import { SessionToast, SessionToastType } from './SessionToast';
import { SessionIconType } from './icon';
import { DefaultTheme, withTheme } from 'styled-components';
import { getPasswordHash } from '../../../js/modules/data';
import { getPasswordHash } from '../../data/data';
export enum PasswordAction {
Set = 'set',
Change = 'change',

View File

@ -5,7 +5,7 @@ import { SessionButton } from './SessionButton';
import { ToastUtils } from '../../session/utils';
import { DefaultTheme, withTheme } from 'styled-components';
import { PasswordUtil } from '../../util';
import { getPasswordHash } from '../../../js/modules/data';
import { getPasswordHash } from '../../data/data';
interface Props {
onClose: any;

View File

@ -26,15 +26,15 @@ import * as MIME from '../../../types/MIME';
import { SessionFileDropzone } from './SessionFileDropzone';
import { ConversationType } from '../../../state/ducks/conversations';
import { MessageView } from '../../MainViewController';
import {
getMessageById,
getPubkeysInPublicConversation,
} from '../../../../js/modules/data';
import { pushUnblockToSend } from '../../../session/utils/Toast';
import { MessageDetail } from '../../conversation/MessageDetail';
import { ConversationController } from '../../../session/conversations';
import { PubKey } from '../../../session/types';
import { MessageModel } from '../../../models/message';
import {
getMessageById,
getPubkeysInPublicConversation,
} from '../../../data/data';
interface State {
// Message sending progress
@ -808,9 +808,7 @@ export class SessionConversation extends React.Component<Props, State> {
);
if (quotedMessage) {
const quotedMessageModel = await getMessageById(quotedMessage.id, {
Message: MessageModel,
});
const quotedMessageModel = await getMessageById(quotedMessage.id);
if (quotedMessageModel) {
quotedMessageProps = await conversationModel.makeQuote(
quotedMessageModel

View File

@ -15,9 +15,9 @@ import { SessionLastSeenIndicator } from './SessionLastSeedIndicator';
import { ToastUtils } from '../../../session/utils';
import { TypingBubble } from '../../conversation/TypingBubble';
import { ConversationController } from '../../../session/conversations';
import { MessageCollection, MessageModel } from '../../../models/message';
import { MessageModel } from '../../../models/message';
import { MessageRegularProps } from '../../../models/messageType';
import { getMessagesBySentAt } from '../../../../js/modules/data';
import { getMessagesBySentAt } from '../../../data/data';
interface State {
showScrollButton: boolean;
@ -555,9 +555,7 @@ export class SessionMessagesList extends React.Component<Props, State> {
// If there's no message already in memory, we won't be scrolling. So we'll gather
// some more information, then show an informative toast to the user.
if (!targetMessage) {
const collection = await getMessagesBySentAt(quoteId, {
MessageCollection,
});
const collection = await getMessagesBySentAt(quoteId);
const found = Boolean(
collection.find((item: MessageModel) => {
const messageAuthor = item.propsForMessage?.authorPhoneNumber;

View File

@ -20,7 +20,7 @@ import { DefaultTheme, withTheme } from 'styled-components';
import {
getMessagesWithFileAttachments,
getMessagesWithVisualMediaAttachments,
} from '../../../../js/modules/data';
} from '../../../data/data';
interface Props {
id: string;

View File

@ -17,7 +17,7 @@ import {
getConversations,
} from '../../../state/selectors/conversations';
import { connect } from 'react-redux';
import { getPasswordHash } from '../../../../js/modules/data';
import { getPasswordHash } from '../../../../ts/data/data';
export enum SessionSettingCategory {
Appearance = 'appearance',

ts/data/data.ts (new file, 1072 lines)

File diff suppressed because it is too large.

View File

@ -29,7 +29,7 @@ import {
removeAllMessagesInConversation,
removeMessage as dataRemoveMessage,
updateConversation,
} from '../../js/modules/data';
} from '../../ts/data/data';
export interface OurLokiProfile {
displayName: string;
@ -520,12 +520,11 @@ export class ConversationModel extends Backbone.Model<ConversationAttributes> {
}
public async getUnread() {
return getUnreadByConversation(this.id, {
MessageCollection: MessageCollection,
});
return getUnreadByConversation(this.id);
}
public async getUnreadCount() {
window.log.warn('getUnreadCount is slow');
return getUnreadCountByConversation(this.id);
}
@ -867,7 +866,6 @@ export class ConversationModel extends Backbone.Model<ConversationAttributes> {
}
const messages = await getMessagesByConversation(this.id, {
limit: 1,
MessageCollection: MessageCollection,
});
const lastMessageModel = messages.at(0);
const lastMessageJSON = lastMessageModel ? lastMessageModel.toJSON() : null;
@ -1009,9 +1007,7 @@ export class ConversationModel extends Backbone.Model<ConversationAttributes> {
}
public async commit() {
await updateConversation(this.id, this.attributes, {
Conversation: ConversationModel,
});
await updateConversation(this.id, this.attributes);
this.trigger('change', this);
}
@ -1058,7 +1054,7 @@ export class ConversationModel extends Backbone.Model<ConversationAttributes> {
conversationId,
})
);
let unreadMessages = await this.getUnread();
let unreadMessages = (await this.getUnread()).models;
const oldUnread = unreadMessages.filter(
(message: any) => message.get('received_at') <= newestUnreadDate
@ -1467,9 +1463,7 @@ export class ConversationModel extends Backbone.Model<ConversationAttributes> {
}
public async removeMessage(messageId: any) {
await dataRemoveMessage(messageId, {
Message: MessageModel,
});
await dataRemoveMessage(messageId);
window.Whisper.events.trigger('messageDeleted', {
conversationKey: this.id,
messageId,
@ -1494,9 +1488,7 @@ export class ConversationModel extends Backbone.Model<ConversationAttributes> {
}
public async destroyMessages() {
await removeAllMessagesInConversation(this.id, {
MessageCollection,
});
await removeAllMessagesInConversation(this.id);
window.Whisper.events.trigger('conversationReset', {
conversationKey: this.id,

View File

@ -21,7 +21,7 @@ import {
} from './messageType';
import autoBind from 'auto-bind';
import { saveMessage } from '../../js/modules/data';
import { saveMessage } from '../../ts/data/data';
import { ConversationModel } from './conversation';
export class MessageModel extends Backbone.Model<MessageAttributes> {
public propsForTimerNotification: any;
@ -34,14 +34,12 @@ export class MessageModel extends Backbone.Model<MessageAttributes> {
const filledAttrs = fillMessageAttributesWithDefaults(attributes);
super(filledAttrs);
if (_.isObject(filledAttrs)) {
this.set(
window.Signal.Types.Message.initializeSchemaVersion({
message: filledAttrs,
logger: window.log,
})
);
}
this.set(
window.Signal.Types.Message.initializeSchemaVersion({
message: filledAttrs,
logger: window.log,
})
);
// this.on('expired', this.onExpired);
void this.setToExpire();
@ -1276,7 +1274,6 @@ export class MessageModel extends Backbone.Model<MessageAttributes> {
// TODO investigate the meaning of the forceSave
const id = await saveMessage(this.attributes, {
forceSave,
Message: MessageModel,
});
this.trigger('change');
return id;

View File

@ -1,7 +1,7 @@
import _ from 'lodash';
import { MessageModel } from '../models/message';
import { saveMessage } from '../../js/modules/data';
import { saveMessage } from '../../ts/data/data';
export async function downloadAttachment(attachment: any) {
const serverUrl = new URL(attachment.url).origin;
@ -240,9 +240,7 @@ export async function queueAttachmentDownloads(
}
if (count > 0) {
await saveMessage(message.attributes, {
Message: Whisper.Message,
});
await saveMessage(message.attributes);
return true;
}

View File

@ -18,13 +18,13 @@ import {
getLatestClosedGroupEncryptionKeyPair,
isKeyPairAlreadySaved,
removeAllClosedGroupEncryptionKeyPairs,
} from '../../js/modules/data';
} from '../../ts/data/data';
import {
ClosedGroupNewMessage,
ClosedGroupNewMessageParams,
} from '../session/messages/outgoing/content/data/group/ClosedGroupNewMessage';
import { ECKeyPair } from './keypairs';
import { ECKeyPair, HexKeyPair } from './keypairs';
import { UserUtils } from '../session/utils';
import { ConversationModel } from '../models/conversation';
import _ from 'lodash';
@ -786,6 +786,9 @@ async function sendLatestKeyPairToUsers(
return;
}
const keyPairToUse =
inMemoryKeyPair || ECKeyPair.fromHexKeyPair(latestKeyPair as HexKeyPair);
const expireTimer = groupConvo.get('expireTimer') || 0;
await Promise.all(
@ -800,7 +803,7 @@ async function sendLatestKeyPairToUsers(
const wrappers = await ClosedGroup.buildEncryptionKeyPairWrappers(
[member],
inMemoryKeyPair || ECKeyPair.fromHexKeyPair(latestKeyPair)
keyPairToUse
);
const keypairsMessage = new ClosedGroupEncryptionPairReplyMessage({

View File

@ -15,7 +15,7 @@ import {
createOrUpdateItem,
getAllEncryptionKeyPairsForGroup,
getItemById,
} from '../../js/modules/data';
} from '../../ts/data/data';
import { ECKeyPair } from './keypairs';
import { handleNewClosedGroup } from './closedGroups';
import { KeyPairRequestManager } from './keyPairRequestManager';

View File

@ -14,7 +14,7 @@ import { ConversationController } from '../session/conversations';
import { handleClosedGroupControlMessage } from './closedGroups';
import { MessageModel } from '../models/message';
import { MessageModelType } from '../models/messageType';
import { getMessageBySender } from '../../js/modules/data';
import { getMessageBySender } from '../../ts/data/data';
export async function updateProfile(
conversation: any,
@ -354,12 +354,11 @@ async function isMessageDuplicate({
const { Errors } = window.Signal.Types;
try {
const result = await getMessageBySender(
{ source, sourceDevice, sent_at: timestamp },
{
Message: MessageModel,
}
);
const result = await getMessageBySender({
source,
sourceDevice,
sent_at: timestamp,
});
if (!result) {
return false;

View File

@ -9,7 +9,7 @@ import { ConversationController } from '../session/conversations';
import { ConversationModel } from '../models/conversation';
import { MessageCollection, MessageModel } from '../models/message';
import { MessageController } from '../session/messages';
import { getMessageById, getMessagesBySentAt } from '../../js/modules/data';
import { getMessageById, getMessagesBySentAt } from '../../ts/data/data';
async function handleGroups(
conversation: ConversationModel,
@ -99,9 +99,7 @@ async function copyFromQuotedMessage(
const { attachments, id, author } = quote;
const firstAttachment = attachments[0];
const collection = await getMessagesBySentAt(id, {
MessageCollection,
});
const collection = await getMessagesBySentAt(id);
const found = collection.find((item: any) => {
const messageAuthor = item.getContact();
@ -555,9 +553,7 @@ export async function handleMessageJob(
// We go to the database here because, between the message save above and
// the previous line's trigger() call, we might have marked all messages
// unread in the database. This message might already be read!
const fetched = await getMessageById(message.get('id'), {
Message: MessageModel,
});
const fetched = await getMessageById(message.get('id'));
const previousUnread = message.get('unread');

View File

@ -3,7 +3,7 @@ import {
getAllGroupsInvolvingId,
removeConversation,
saveConversation,
} from '../../../js/modules/data';
} from '../../../ts/data/data';
import {
ConversationAttributes,
ConversationCollection,
@ -196,9 +196,7 @@ export class ConversationController {
}
public async getAllGroupsInvolvingId(id: string) {
const groups = await getAllGroupsInvolvingId(id, {
ConversationCollection,
});
const groups = await getAllGroupsInvolvingId(id);
return groups.map((group: any) => this.conversations.add(group));
}
@ -232,9 +230,7 @@ export class ConversationController {
await conversation.destroyMessages();
await removeConversation(id, {
Conversation: ConversationModel,
});
await removeConversation(id);
conversation.off('change', this.updateReduxConvoChanged);
this.conversations.remove(conversation);
if (window.inboxStore) {
@ -257,9 +253,7 @@ export class ConversationController {
const load = async () => {
try {
const collection = await getAllConversations({
ConversationCollection,
});
const collection = await getAllConversations();
this.conversations.add(collection.models);

View File

@ -4,7 +4,7 @@ import { PubKey } from '../types';
import { concatUInt8Array, getSodium } from '.';
import { fromHexToArray } from '../utils/String';
export { concatUInt8Array, getSodium };
import { getLatestClosedGroupEncryptionKeyPair } from '../../../js/modules/data';
import { getLatestClosedGroupEncryptionKeyPair } from '../../../ts/data/data';
import { UserUtils } from '../utils';
/**

View File

@ -12,7 +12,7 @@ import {
getIdentityKeyById,
getLatestClosedGroupEncryptionKeyPair,
removeAllClosedGroupEncryptionKeyPairs,
} from '../../../js/modules/data';
} from '../../../ts/data/data';
import uuid from 'uuid';
import { SignalService } from '../../protobuf';
import { generateCurve25519KeyPairWithoutPrefix } from '../crypto';

View File

@ -1,7 +1,7 @@
// You can see MessageController for in-memory registered messages.
// We register messages to it every time we send one, so that when an event happens we can find which message it was, based on this id.
import { getMessagesByConversation } from '../../../js/modules/data';
import { getMessagesByConversation } from '../../../ts/data/data';
import { ConversationModel } from '../../models/conversation';
import { MessageCollection, MessageModel } from '../../models/message';
@ -71,19 +71,4 @@ export class MessageController {
public get(identifier: string) {
return this.messageLookup.get(identifier);
}
public async getMessagesByKeyFromDb(key: string) {
// loadLive gets messages live, not from the database, which can lag behind.
let messages = [];
const messageSet = await getMessagesByConversation(key, {
limit: 100,
MessageCollection,
});
messages = messageSet.models.map(
(conv: ConversationModel) => conv.attributes
);
return messages;
}
}

View File

@ -1,5 +1,5 @@
import { allowOnlyOneAtATime } from '../../../js/modules/loki_primitives';
import { getGuardNodes } from '../../../js/modules/data';
import { getGuardNodes } from '../../../ts/data/data';
import * as SnodePool from '../snode_api/snodePool';
import _ from 'lodash';
import fetch from 'node-fetch';

View File

@ -1,5 +1,5 @@
import _ from 'lodash';
import { createOrUpdateItem, getItemById } from '../../../js/modules/data';
import { createOrUpdateItem, getItemById } from '../../../ts/data/data';
import { PartialRawMessage, RawMessage } from '../types/RawMessage';
import { ContentMessage } from '../messages/outgoing';
import { PubKey } from '../types';

View File

@ -12,7 +12,7 @@ import {
import {
getSwarmNodesForPubkey,
updateSwarmNodesForPubkey,
} from '../../../js/modules/data';
} from '../../../ts/data/data';
import semver from 'semver';
import _ from 'lodash';

View File

@ -9,7 +9,7 @@ import {
getSeenMessagesByHashList,
saveSeenMessageHashes,
updateLastHash,
} from '../../../js/modules/data';
} from '../../../ts/data/data';
import { StringUtils } from '../../session/utils';
import { ConversationController } from '../conversations';

View File

@ -135,7 +135,7 @@ export class OpenGroup {
// Try to connect to server
try {
conversation = await PromiseUtils.timeout(
window.attemptConnection(prefixedServer, channel),
OpenGroup.attemptConnection(prefixedServer, channel),
20000
);
@ -239,4 +239,61 @@ export class OpenGroup {
return `http${hasSSL ? 's' : ''}://${server}`;
}
// Attempts a connection to an open group server
private static async attemptConnection(serverURL: string, channelId: number) {
let completeServerURL = serverURL.toLowerCase();
const valid = OpenGroup.validate(completeServerURL);
if (!valid) {
return new Promise((_resolve, reject) => {
reject(window.i18n('connectToServerFail'));
});
}
// Add http or https prefix to server
completeServerURL = OpenGroup.prefixify(completeServerURL);
const rawServerURL = serverURL
.replace(/^https?:\/\//i, '')
.replace(/[/\\]+$/i, '');
const conversationId = `publicChat:${channelId}@${rawServerURL}`;
// Quickly peek to make sure we don't already have it
const conversationExists = ConversationController.getInstance().get(
conversationId
);
if (conversationExists) {
// We are already a member of this public chat
return new Promise((_resolve, reject) => {
reject(window.i18n('publicChatExists'));
});
}
// Get server
const serverAPI = await window.lokiPublicChatAPI.findOrCreateServer(
completeServerURL
);
// SSL certificate failure or offline
if (!serverAPI) {
// Url incorrect or server not compatible
return new Promise((_resolve, reject) => {
reject(window.i18n('connectToServerFail'));
});
}
// Create conversation
const conversation = await ConversationController.getInstance().getOrCreateAndWait(
conversationId,
'group'
);
// Convert conversation to a public one
await conversation.setPublicSource(completeServerURL, channelId);
// and finally activate it
void conversation.getPublicSendData(); // may want "await" if you want to use the API
return conversation;
}
}
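A small illustrative helper (hypothetical name, example values) showing the normalization attemptConnection performs above before looking the conversation up:
// Illustrative only: how a user-entered server URL maps to the public chat
// conversation id used above.
function toPublicConversationId(serverURL: string, channelId: number): string {
  const rawServerURL = serverURL
    .replace(/^https?:\/\//i, '')
    .replace(/[/\\]+$/i, '');
  return `publicChat:${channelId}@${rawServerURL}`;
}
// toPublicConversationId('https://chat.example.org/', 1)
//   -> 'publicChat:1@chat.example.org'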

View File

@ -11,7 +11,7 @@ import {
ConfigurationMessageClosedGroup,
} from '../messages/outgoing/content/ConfigurationMessage';
import uuid from 'uuid';
import { getLatestClosedGroupEncryptionKeyPair } from '../../../js/modules/data';
import { getLatestClosedGroupEncryptionKeyPair } from '../../../ts/data/data';
import { UserUtils } from '.';
import { ECKeyPair } from '../../receiver/keypairs';
import _ from 'lodash';

View File

@ -1,6 +1,6 @@
import _ from 'lodash';
import { UserUtils } from '.';
import { getItemById } from '../../../js/modules/data';
import { getItemById } from '../../../ts/data/data';
import { KeyPair } from '../../../libtextsecure/libsignal-protocol';
import { PubKey } from '../types';
import { toHex } from './String';

View File

@ -1,4 +1,4 @@
import { createOrUpdateItem, getItemById } from '../../../js/modules/data';
import { createOrUpdateItem, getItemById } from '../../../ts/data/data';
import { getMessageQueue } from '..';
import { ConversationController } from '../conversations';
import { getCurrentConfigurationMessage } from './Messages';

View File

@ -1,4 +1,4 @@
import { getPasswordHash } from '../../js/modules/data';
import { getPasswordHash } from '../../ts/data/data';
export async function hasPassword() {
const hash = await getPasswordHash();

View File

@ -4,7 +4,7 @@ import { Constants } from '../../session';
import { createAsyncThunk } from '@reduxjs/toolkit';
import { ConversationController } from '../../session/conversations';
import { MessageCollection, MessageModel } from '../../models/message';
import { getMessagesByConversation } from '../../../js/modules/data';
import { getMessagesByConversation } from '../../data/data';
// State
@ -100,7 +100,7 @@ async function getMessages(
window.log.error('Failed to get convo on reducer.');
return [];
}
const unreadCount = (await conversation.getUnreadCount()) as number;
const unreadCount = await conversation.getUnreadCount();
let msgCount =
numMessages ||
Number(Constants.CONVERSATION.DEFAULT_MESSAGE_FETCH_COUNT) + unreadCount;
@ -115,15 +115,15 @@ async function getMessages(
const messageSet = await getMessagesByConversation(conversationKey, {
limit: msgCount,
MessageCollection,
});
// Set first member of series here.
const messageModels = messageSet.models;
const isPublic = conversation.isPublic();
const messagesPickedUp = messageModels.map(makeMessageTypeFromMessageModel);
const sortedMessage = sortMessages(messageModels, isPublic);
const sortedMessage = sortMessages(messagesPickedUp, isPublic);
// no need to do that `firstMessageOfSeries` on a private chat
if (conversation.isPrivate()) {
@ -438,6 +438,10 @@ function getEmptyState(): ConversationsStateType {
};
}
const makeMessageTypeFromMessageModel = (message: MessageModel) => {
return _.pick(message as any, toPickFromMessageModel) as MessageTypeInConvo;
};
function sortMessages(
messages: Array<MessageTypeInConvo>,
isPublic: boolean
@ -472,10 +476,7 @@ function handleMessageAdded(
const { messages } = state;
const { conversationKey, messageModel } = action.payload;
if (conversationKey === state.selectedConversation) {
const addedMessage = _.pick(
messageModel as any,
toPickFromMessageModel
) as MessageTypeInConvo;
const addedMessage = makeMessageTypeFromMessageModel(messageModel);
const messagesWithNewMessage = [...messages, addedMessage];
const convo = state.conversationLookup[state.selectedConversation];
const isPublic = convo?.isPublic || false;

View File

@ -2,7 +2,7 @@ import { omit, reject } from 'lodash';
import { AdvancedSearchOptions, SearchOptions } from '../../types/Search';
import { cleanSearchTerm } from '../../util/cleanSearchTerm';
import { searchConversations, searchMessages } from '../../../js/modules/data';
import { searchConversations, searchMessages } from '../../../ts/data/data';
import { makeLookup } from '../../util/makeLookup';
import {

View File

@ -10,7 +10,7 @@ import { TestUtils } from '../../../test-utils';
import Sinon, * as sinon from 'sinon';
import * as cache from '../../../../receiver/cache';
import * as data from '../../../../../js/modules/data';
import * as data from '../../../../../ts/data/data';
import { EnvelopePlus } from '../../../../receiver/types';
import chaiAsPromised from 'chai-as-promised';

View File

@ -1,5 +1,5 @@
import * as sinon from 'sinon';
import * as DataShape from '../../../../js/modules/data';
import * as DataShape from '../../../../ts/data/data';
import { Application } from 'spectron';
const globalAny: any = global;
@ -8,7 +8,7 @@ const sandbox = sinon.createSandbox();
// We have to do this in a weird way because Data uses module.exports
// which doesn't play well with sinon or ImportMock
// tslint:disable-next-line: no-require-imports no-var-requires
const Data = require('../../../../js/modules/data');
const Data = require('../../../../ts/data/data');
type DataFunction = typeof DataShape;
/**

View File

@ -1,4 +1,4 @@
import { createOrUpdateItem, getItemById } from '../../js/modules/data';
import { createOrUpdateItem, getItemById } from '../../ts/data/data';
import { PubKey } from '../session/types';
import { UserUtils } from '../session/utils';

ts/window.d.ts (vendored, 3 changed lines)
View File

@ -9,7 +9,6 @@ import { LokiMessageInterface } from '../js/modules/loki_message_api';
import { SwarmPolling } from './session/snode_api/swarmPolling';
import { LibTextsecure } from '../libtextsecure';
import { ConversationType } from '../js/modules/data';
import { RecoveryPhraseUtil } from '../libloki/modules/mnemonic';
import { ConfirmationDialogParams } from '../background';
import {} from 'styled-components/cssprop';
@ -21,6 +20,7 @@ import { MessageController } from './session/messages/MessageController';
import { DefaultTheme } from 'styled-components';
import { ConversationCollection } from './models/conversation';
import { ConversationType } from './state/ducks/conversations';
/*
We declare window stuff here instead of global.d.ts because we are importing other declarations.
@ -42,7 +42,6 @@ declare global {
StubAppDotNetApi: any;
StubMessageAPI: any;
Whisper: any;
attemptConnection: ConversationType;
clearLocalData: any;
clipboard: any;
confirmationDialog: (params: ConfirmationDialogParams) => any;