Backup: zipped messages.json, flat attachments dir

Backup creates, in a target directory:
  - An attachments folder, with all attachments, each named for its
    parent message's id (a GUID). If a message has more than one
    attachment, each one beyond the first ends with a '-N' suffix,
    where N is the attachment's zero-based index (see the sketch just
    after this list).
  - A file named messages.zip. It contains exactly what went to disk in
    the original export code, but zipped up.
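
For reference, a minimal sketch of that naming scheme (attachmentFileName
here is an illustrative name; the actual helper added in this commit is
getAnonymousAttachmentFileName, visible in the diff below):

  function attachmentFileName(messageId, index) {
    // the first attachment keeps the bare message id; later ones get '-N'
    return index ? `${messageId}-${index}` : messageId;
  }

  // e.g. for a message with id 'abc-123' and three attachments:
  //   attachmentFileName('abc-123', 0) => 'abc-123'
  //   attachmentFileName('abc-123', 1) => 'abc-123-1'
  //   attachmentFileName('abc-123', 2) => 'abc-123-2'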

Export is now only 'light,' and in this new messages.zip format.

Import supports both the new format and the old format. If the target
directory has a messages.zip file, we'll treat it as the new format.
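
A minimal sketch of that check (isNewFormat is an illustrative helper;
the real logic is inline in importFromDirectory, below):

  const fs = require('fs');
  const path = require('path');

  function isNewFormat(directory) {
    // new-format backups always have a messages.zip at the top level
    return fs.existsSync(path.join(directory, 'messages.zip'));
  }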

Next up: Encrypting attachments and the messages.zip!
Scott Nonnenberg 2018-03-12 18:14:24 -07:00
parent 1c6d91b59c
commit 6d8f4b7b6e
GPG Key ID: 5F82280C35134661
4 changed files with 343 additions and 65 deletions

View File

@ -2,7 +2,6 @@
/* global dcodeIO: false */
/* global _: false */
/* global textsecure: false */
/* global moment: false */
/* global i18n: false */
/* eslint-env browser */
@ -13,6 +12,11 @@
const fs = require('fs');
const path = require('path');
const tmp = require('tmp');
const decompress = require('decompress');
const pify = require('pify');
const archiver = require('archiver');
const rimraf = require('rimraf');
const electronRemote = require('electron').remote;
const {
@ -23,13 +27,13 @@ const {
module.exports = {
getDirectoryForExport,
exportToDirectory,
backupToDirectory,
getDirectoryForImport,
importFromDirectory,
// for testing
sanitizeFileName,
trimFileName,
getAttachmentFileName,
getExportAttachmentFileName,
getConversationDirName,
getConversationLoggingName,
};
@ -102,12 +106,12 @@ function createOutputStream(writer) {
};
}
async function exportNonMessages(db, parent) {
async function exportContactAndGroupsToFile(db, parent) {
const writer = await createFileAndWriter(parent, 'db.json');
return exportToJsonFile(db, writer);
return exportContactsAndGroups(db, writer);
}
function exportToJsonFile(db, fileWriter) {
function exportContactsAndGroups(db, fileWriter) {
return new Promise((resolve, reject) => {
let storeNames = db.objectStoreNames;
storeNames = _.without(
@ -347,12 +351,18 @@ function createDirectory(parent, name) {
return new Promise((resolve, reject) => {
const sanitized = sanitizeFileName(name);
const targetDir = path.join(parent, sanitized);
if (fs.existsSync(targetDir)) {
resolve(targetDir);
return;
}
fs.mkdir(targetDir, (error) => {
if (error) {
return reject(error);
reject(error);
return;
}
return resolve(targetDir);
resolve(targetDir);
});
});
}
@ -381,9 +391,8 @@ function readFileAsText(parent, name) {
});
}
function readFileAsArrayBuffer(parent, name) {
function readFileAsArrayBuffer(targetPath) {
return new Promise((resolve, reject) => {
const targetPath = path.join(parent, name);
// omitting the encoding to get a buffer back
fs.readFile(targetPath, (error, buffer) => {
if (error) {
@ -413,7 +422,7 @@ function trimFileName(filename) {
}
function getAttachmentFileName(attachment) {
function getExportAttachmentFileName(message, index, attachment) {
if (attachment.fileName) {
return trimFileName(attachment.fileName);
}
@ -428,25 +437,44 @@ function getAttachmentFileName(attachment) {
return name;
}
async function readAttachment(parent, message, attachment) {
const name = getAttachmentFileName(attachment);
const sanitized = sanitizeFileName(name);
const attachmentDir = path.join(parent, message.received_at.toString());
attachment.data = await readFileAsArrayBuffer(attachmentDir, sanitized);
function getAnonymousAttachmentFileName(message, index) {
if (!index) {
return message.id;
}
return `${message.id}-${index}`;
}
async function writeAttachment(dir, attachment) {
const filename = getAttachmentFileName(attachment);
async function readAttachment(dir, attachment, name) {
const anonymousName = sanitizeFileName(name);
const targetPath = path.join(dir, anonymousName);
if (!fs.existsSync(targetPath)) {
console.log(`Warning: attachment ${anonymousName} not found`);
return;
}
attachment.data = await readFileAsArrayBuffer(targetPath);
}
async function writeAttachment(dir, message, index, attachment) {
const filename = getAnonymousAttachmentFileName(message, index);
const target = path.join(dir, filename);
if (fs.existsSync(target)) {
console.log(`Skipping attachment ${filename}; already exists`);
return;
}
const writer = await createFileAndWriter(dir, filename);
const stream = createOutputStream(writer);
stream.write(Buffer.from(attachment.data));
return stream.close();
await stream.close();
}
async function writeAttachments(parentDir, name, messageId, attachments) {
const dir = await createDirectory(parentDir, messageId);
const promises = _.map(attachments, attachment => writeAttachment(dir, attachment));
async function writeAttachments(dir, name, message, attachments) {
const promises = _.map(
attachments,
(attachment, index) => writeAttachment(dir, message, index, attachment)
);
try {
await Promise.all(promises);
} catch (error) {
@ -464,10 +492,27 @@ function sanitizeFileName(filename) {
return filename.toString().replace(/[^a-z0-9.,+()'#\- ]/gi, '_');
}
async function exportConversation(db, name, conversation, dir) {
async function exportConversation(db, conversation, options) {
options = options || {};
const {
name,
dir,
attachmentsDir,
} = options;
if (!name) {
throw new Error('Need a name!');
}
if (!dir) {
throw new Error('Need a target directory!');
}
if (!attachmentsDir) {
throw new Error('Need an attachments directory!');
}
console.log('exporting conversation', name);
const writer = await createFileAndWriter(dir, 'messages.json');
return new Promise((resolve, reject) => {
return new Promise(async (resolve, reject) => {
const transaction = db.transaction('messages', 'readwrite');
transaction.onerror = () => {
Whisper.Database.handleDOMException(
@ -505,7 +550,6 @@ async function exportConversation(db, name, conversation, dir) {
const cursor = event.target.result;
if (cursor) {
const message = cursor.value;
const messageId = message.received_at;
const { attachments } = message;
// skip message if it is disappearing, no matter the amount of time left
@ -539,9 +583,11 @@ async function exportConversation(db, name, conversation, dir) {
stream.write(jsonString);
if (attachments && attachments.length) {
const process = () => writeAttachments(dir, name, messageId, attachments);
const exportAttachments = () =>
writeAttachments(attachmentsDir, name, message, attachments);
// eslint-disable-next-line more/no-then
promiseChain = promiseChain.then(process);
promiseChain = promiseChain.then(exportAttachments);
}
count += 1;
@ -598,7 +644,20 @@ function getConversationLoggingName(conversation) {
return name;
}
function exportConversations(db, parentDir) {
function exportConversations(db, options) {
options = options || {};
const {
messagesDir,
attachmentsDir,
} = options;
if (!messagesDir) {
return Promise.reject(new Error('Need a messages directory!'));
}
if (!attachmentsDir) {
return Promise.reject(new Error('Need an attachments directory!'));
}
return new Promise((resolve, reject) => {
const transaction = db.transaction('conversations', 'readwrite');
transaction.onerror = () => {
@ -630,8 +689,12 @@ function exportConversations(db, parentDir) {
const name = getConversationLoggingName(conversation);
const process = async () => {
const dir = await createDirectory(parentDir, dirName);
return exportConversation(db, name, conversation, dir);
const dir = await createDirectory(messagesDir, dirName);
return exportConversation(db, conversation, {
name,
dir,
attachmentsDir,
});
};
console.log('scheduling export for conversation', name);
@ -688,12 +751,11 @@ function getDirContents(dir) {
});
}
function loadAttachments(dir, message) {
const promises = _.map(message.attachments, attachment => readAttachment(
dir,
message,
attachment
));
function loadAttachments(dir, message, getName) {
const promises = _.map(message.attachments, (attachment, index) => {
const name = getName(message, index, attachment);
return readAttachment(dir, attachment, name);
});
return Promise.all(promises);
}
@ -765,7 +827,11 @@ async function importConversation(db, dir, options) {
options = options || {};
_.defaults(options, { messageLookup: {} });
const { messageLookup } = options;
const {
messageLookup,
attachmentsDir,
} = options;
let conversationId = 'unknown';
let total = 0;
let skipped = 0;
@ -794,13 +860,18 @@ async function importConversation(db, dir, options) {
}
if (message.attachments && message.attachments.length) {
const process = async () => {
await loadAttachments(dir, message);
const importMessage = async () => {
const getName = attachmentsDir
? getAnonymousAttachmentFileName
: getExportAttachmentFileName;
const parent = attachmentsDir || path.join(dir, message.received_at.toString());
await loadAttachments(parent, message, getName);
return saveMessage(db, message);
};
// eslint-disable-next-line more/no-then
promiseChain = promiseChain.then(process);
promiseChain = promiseChain.then(importMessage);
return false;
}
@ -825,7 +896,6 @@ async function importConversation(db, dir, options) {
async function importConversations(db, dir, options) {
const contents = await getDirContents(dir);
let promiseChain = Promise.resolve();
_.forEach(contents, (conversationDir) => {
@ -833,10 +903,10 @@ async function importConversations(db, dir, options) {
return;
}
const process = () => importConversation(db, conversationDir, options);
const loadConversation = () => importConversation(db, conversationDir, options);
// eslint-disable-next-line more/no-then
promiseChain = promiseChain.then(process);
promiseChain = promiseChain.then(loadConversation);
});
return promiseChain;
@ -908,10 +978,6 @@ function assembleLookup(db, storeName, keyFunction) {
});
}
function getTimestamp() {
return moment().format('YYYY MMM Do [at] h.mm.ss a');
}
function getDirectoryForExport() {
const options = {
title: i18n('exportChooserTitle'),
@ -920,22 +986,72 @@ function getDirectoryForExport() {
return getDirectory(options);
}
async function exportToDirectory(directory) {
const name = `Signal Export ${getTimestamp()}`;
function createZip(zipDir, targetDir) {
return new Promise((resolve, reject) => {
const target = path.join(zipDir, 'messages.zip');
const output = fs.createWriteStream(target);
const archive = archiver('zip', {
cwd: targetDir,
});
output.on('close', () => {
resolve(target);
});
archive.on('warning', (error) => {
console.log(`Archive generation warning: ${error.stack}`);
});
archive.on('error', reject);
archive.pipe(output);
archive.directory(targetDir, '');
archive.finalize();
});
}
function createTempDir() {
return pify(tmp.dir)();
}
function deleteAll(pattern) {
console.log(`Deleting ${pattern}`);
return pify(rimraf)(pattern);
}
async function backupToDirectory(directory) {
let tempDir;
try {
tempDir = await createTempDir();
const db = await Whisper.Database.open();
const dir = await createDirectory(directory, name);
await exportNonMessages(db, dir);
await exportConversations(db, dir);
const attachmentsDir = await createDirectory(directory, 'attachments');
await exportContactAndGroupsToFile(db, tempDir);
await exportConversations(db, {
messagesDir: tempDir,
attachmentsDir,
});
await createZip(directory, tempDir);
// now that we've made the zip file, we can delete the temp messages directory
await deleteAll(tempDir);
tempDir = null;
console.log('done backing up!');
return dir;
return directory;
} catch (error) {
console.log(
'the backup went wrong:',
error && error.stack ? error.stack : error
);
throw error;
} finally {
if (tempDir) {
await deleteAll(tempDir);
}
}
}
@ -964,9 +1080,35 @@ async function importFromDirectory(directory, options) {
groupLookup,
});
const zipPath = path.join(directory, 'messages.zip');
if (fs.existsSync(zipPath)) {
// we're in the world of an encrypted, zipped backup
let tempDir;
try {
tempDir = await createTempDir();
const attachmentsDir = path.join(directory, 'attachments');
await decompress(zipPath, tempDir);
options = Object.assign({}, options, {
attachmentsDir,
});
const result = await importNonMessages(db, tempDir, options);
await importConversations(db, tempDir, options);
console.log('done importing from backup!');
return result;
} finally {
if (tempDir) {
await deleteAll(tempDir);
}
}
}
const result = await importNonMessages(db, directory, options);
await importConversations(db, directory, options);
console.log('done restoring from backup!');
console.log('done importing!');
return result;
} catch (error) {
console.log(

View File

@ -44,11 +44,13 @@
"open-coverage": "open coverage/lcov-report/index.html"
},
"dependencies": {
"archiver": "^2.1.1",
"blob-util": "^1.3.0",
"blueimp-canvas-to-blob": "^3.14.0",
"blueimp-load-image": "^2.18.0",
"bunyan": "^1.8.12",
"config": "^1.28.1",
"decompress": "^4.2.0",
"electron-config": "^1.0.0",
"electron-editor-context-menu": "^1.1.1",
"electron-is-dev": "^0.3.0",
@ -67,12 +69,14 @@
"node-fetch": "https://github.com/scottnonnenberg/node-fetch.git#3e5f51e08c647ee5f20c43b15cf2d352d61c36b4",
"node-notifier": "^5.1.2",
"os-locale": "^2.1.0",
"pify": "^3.0.0",
"proxy-agent": "^2.1.0",
"read-last-lines": "^1.3.0",
"rimraf": "^2.6.2",
"semver": "^5.4.1",
"spellchecker": "^3.4.4",
"testcheck": "^1.0.0-rc.2",
"tmp": "^0.0.33",
"websocket": "^1.0.25"
},
"devDependencies": {
@ -105,8 +109,7 @@
"nsp": "^3.2.1",
"nyc": "^11.4.1",
"sinon": "^4.4.2",
"spectron": "^3.8.0",
"tmp": "^0.0.33"
"spectron": "^3.8.0"
},
"engines": {
"node": "8.2.1"

View File

@ -35,13 +35,13 @@ describe('Backup', function() {
});
});
describe('getAttachmentFileName', function() {
describe('getExportAttachmentFileName', function() {
it('uses original filename if attachment has one', function() {
var attachment = {
fileName: 'blah.jpg'
};
var expected = 'blah.jpg';
assert.strictEqual(Signal.Backup.getAttachmentFileName(attachment), expected);
assert.strictEqual(Signal.Backup.getExportAttachmentFileName(attachment), expected);
});
it('uses attachment id if no filename', function() {
@ -49,7 +49,7 @@ describe('Backup', function() {
id: '123'
};
var expected = '123';
assert.strictEqual(Signal.Backup.getAttachmentFileName(attachment), expected);
assert.strictEqual(Signal.Backup.getExportAttachmentFileName(attachment), expected);
});
it('uses filename and contentType if available', function() {
@ -58,7 +58,7 @@ describe('Backup', function() {
contentType: 'image/jpeg'
};
var expected = '123.jpeg';
assert.strictEqual(Signal.Backup.getAttachmentFileName(attachment), expected);
assert.strictEqual(Signal.Backup.getExportAttachmentFileName(attachment), expected);
});
it('handles strange contentType', function() {
@ -67,7 +67,7 @@ describe('Backup', function() {
contentType: 'something'
};
var expected = '123.something';
assert.strictEqual(Signal.Backup.getAttachmentFileName(attachment), expected);
assert.strictEqual(Signal.Backup.getExportAttachmentFileName(attachment), expected);
});
});

yarn.lock (139 lines changed)
View File

@ -192,6 +192,19 @@ archiver-utils@^1.3.0:
normalize-path "^2.0.0"
readable-stream "^2.0.0"
archiver@^2.1.1:
version "2.1.1"
resolved "https://registry.yarnpkg.com/archiver/-/archiver-2.1.1.tgz#ff662b4a78201494a3ee544d3a33fe7496509ebc"
dependencies:
archiver-utils "^1.3.0"
async "^2.0.0"
buffer-crc32 "^0.2.1"
glob "^7.0.0"
lodash "^4.8.0"
readable-stream "^2.0.0"
tar-stream "^1.5.0"
zip-stream "^1.2.0"
archiver@~2.1.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/archiver/-/archiver-2.1.0.tgz#d2df2e8d5773a82c1dcce925ccc41450ea999afd"
@ -443,6 +456,10 @@ balanced-match@^0.4.1:
version "0.4.2"
resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-0.4.2.tgz#cb3f3e3c732dc0f01ee70b403f302e61d7709838"
base64-js@0.0.8:
version "0.0.8"
resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-0.0.8.tgz#1101e9544f4a76b1bc3b26d452ca96d7a35e7978"
base64-js@1.2.0, base64-js@^1.0.2:
version "1.2.0"
resolved "https://registry.yarnpkg.com/base64-js/-/base64-js-1.2.0.tgz#a39992d723584811982be5e290bb6a53d86700f1"
@ -581,7 +598,7 @@ browser-stdout@1.3.0:
version "1.3.0"
resolved "https://registry.yarnpkg.com/browser-stdout/-/browser-stdout-1.3.0.tgz#f351d32969d32fa5d7a5567154263d928ae3bd1f"
buffer-crc32@^0.2.1:
buffer-crc32@^0.2.1, buffer-crc32@~0.2.3:
version "0.2.13"
resolved "https://registry.yarnpkg.com/buffer-crc32/-/buffer-crc32-0.2.13.tgz#0d333e3f00eac50aa1454abd30ef8c2a5d9a7242"
@ -597,6 +614,14 @@ buffer@4.9.1:
ieee754 "^1.1.4"
isarray "^1.0.0"
buffer@^3.0.1:
version "3.6.0"
resolved "https://registry.yarnpkg.com/buffer/-/buffer-3.6.0.tgz#a72c936f77b96bf52f5f7e7b467180628551defb"
dependencies:
base64-js "0.0.8"
ieee754 "^1.1.4"
isarray "^1.0.0"
buffers@~0.1.1:
version "0.1.1"
resolved "https://registry.yarnpkg.com/buffers/-/buffers-0.1.1.tgz#b24579c3bed4d6d396aeee6d9a8ae7f5482ab7bb"
@ -911,6 +936,12 @@ commander@^2.9.0, commander@~2.9.0:
dependencies:
graceful-readlink ">= 1.0.0"
commander@~2.8.1:
version "2.8.1"
resolved "https://registry.yarnpkg.com/commander/-/commander-2.8.1.tgz#06be367febfda0c330aa1e2a072d3dc9762425d4"
dependencies:
graceful-readlink ">= 1.0.0"
comment-parser@^0.3.1:
version "0.3.1"
resolved "https://registry.yarnpkg.com/comment-parser/-/comment-parser-0.3.1.tgz#fd657aac8c1492d308c9a6100fc9b49d2435aba1"
@ -1184,6 +1215,41 @@ decompress-response@^3.3.0:
dependencies:
mimic-response "^1.0.0"
decompress-tar@^4.0.0, decompress-tar@^4.1.0, decompress-tar@^4.1.1:
version "4.1.1"
resolved "https://registry.yarnpkg.com/decompress-tar/-/decompress-tar-4.1.1.tgz#718cbd3fcb16209716e70a26b84e7ba4592e5af1"
dependencies:
file-type "^5.2.0"
is-stream "^1.1.0"
tar-stream "^1.5.2"
decompress-tarbz2@^4.0.0:
version "4.1.1"
resolved "https://registry.yarnpkg.com/decompress-tarbz2/-/decompress-tarbz2-4.1.1.tgz#3082a5b880ea4043816349f378b56c516be1a39b"
dependencies:
decompress-tar "^4.1.0"
file-type "^6.1.0"
is-stream "^1.1.0"
seek-bzip "^1.0.5"
unbzip2-stream "^1.0.9"
decompress-targz@^4.0.0:
version "4.1.1"
resolved "https://registry.yarnpkg.com/decompress-targz/-/decompress-targz-4.1.1.tgz#c09bc35c4d11f3de09f2d2da53e9de23e7ce1eee"
dependencies:
decompress-tar "^4.1.1"
file-type "^5.2.0"
is-stream "^1.1.0"
decompress-unzip@^4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/decompress-unzip/-/decompress-unzip-4.0.1.tgz#deaaccdfd14aeaf85578f733ae8210f9b4848f69"
dependencies:
file-type "^3.8.0"
get-stream "^2.2.0"
pify "^2.3.0"
yauzl "^2.4.2"
decompress-zip@0.3.0:
version "0.3.0"
resolved "https://registry.yarnpkg.com/decompress-zip/-/decompress-zip-0.3.0.tgz#ae3bcb7e34c65879adfe77e19c30f86602b4bdb0"
@ -1196,6 +1262,19 @@ decompress-zip@0.3.0:
readable-stream "^1.1.8"
touch "0.0.3"
decompress@^4.2.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/decompress/-/decompress-4.2.0.tgz#7aedd85427e5a92dacfe55674a7c505e96d01f9d"
dependencies:
decompress-tar "^4.0.0"
decompress-tarbz2 "^4.0.0"
decompress-targz "^4.0.0"
decompress-unzip "^4.0.1"
graceful-fs "^4.1.10"
make-dir "^1.0.0"
pify "^2.3.0"
strip-dirs "^2.0.0"
deep-eql@^3.0.0:
version "3.0.1"
resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-3.0.1.tgz#dfc9404400ad1c8fe023e7da1df1c147c4b444df"
@ -1931,10 +2010,18 @@ file-sync-cmp@^0.1.0:
version "0.1.1"
resolved "https://registry.yarnpkg.com/file-sync-cmp/-/file-sync-cmp-0.1.1.tgz#a5e7a8ffbfa493b43b923bbd4ca89a53b63b612b"
file-type@^3.1.0:
file-type@^3.1.0, file-type@^3.8.0:
version "3.9.0"
resolved "https://registry.yarnpkg.com/file-type/-/file-type-3.9.0.tgz#257a078384d1db8087bc449d107d52a52672b9e9"
file-type@^5.2.0:
version "5.2.0"
resolved "https://registry.yarnpkg.com/file-type/-/file-type-5.2.0.tgz#2ddbea7c73ffe36368dfae49dc338c058c2b8ad6"
file-type@^6.1.0:
version "6.2.0"
resolved "https://registry.yarnpkg.com/file-type/-/file-type-6.2.0.tgz#e50cd75d356ffed4e306dc4f5bcf52a79903a919"
file-uri-to-path@1:
version "1.0.0"
resolved "https://registry.yarnpkg.com/file-uri-to-path/-/file-uri-to-path-1.0.0.tgz#553a7b8446ff6f684359c445f1e37a05dacc33dd"
@ -2192,6 +2279,13 @@ get-stream@3.0.0, get-stream@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-3.0.0.tgz#8e943d1358dc37555054ecbe2edb05aa174ede14"
get-stream@^2.2.0:
version "2.3.1"
resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-2.3.1.tgz#5f38f93f346009666ee0150a054167f91bdd95de"
dependencies:
object-assign "^4.0.1"
pinkie-promise "^2.0.0"
get-uri@^2.0.0:
version "2.0.1"
resolved "https://registry.yarnpkg.com/get-uri/-/get-uri-2.0.1.tgz#dbdcacacd8c608a38316869368117697a1631c59"
@ -2371,7 +2465,7 @@ got@^8.2.0:
url-parse-lax "^3.0.0"
url-to-options "^1.0.1"
graceful-fs@^4.1.0, graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.3, graceful-fs@^4.1.6, graceful-fs@^4.1.9:
graceful-fs@^4.1.0, graceful-fs@^4.1.10, graceful-fs@^4.1.11, graceful-fs@^4.1.2, graceful-fs@^4.1.3, graceful-fs@^4.1.6, graceful-fs@^4.1.9:
version "4.1.11"
resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.1.11.tgz#0e8bdfe4d1ddb8854d64e04ea7c00e2a026e5658"
@ -2888,6 +2982,10 @@ is-installed-globally@^0.1.0:
global-dirs "^0.1.0"
is-path-inside "^1.0.0"
is-natural-number@^4.0.1:
version "4.0.1"
resolved "https://registry.yarnpkg.com/is-natural-number/-/is-natural-number-4.0.1.tgz#ab9d76e1db4ced51e35de0c72ebecf09f734cde8"
is-npm@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/is-npm/-/is-npm-1.0.0.tgz#f2fb63a65e4905b406c86072765a1a4dc793b9f4"
@ -4757,6 +4855,12 @@ scss-tokenizer@^0.2.1:
js-base64 "^2.1.8"
source-map "^0.4.2"
seek-bzip@^1.0.5:
version "1.0.5"
resolved "https://registry.yarnpkg.com/seek-bzip/-/seek-bzip-1.0.5.tgz#cfe917cb3d274bcffac792758af53173eb1fabdc"
dependencies:
commander "~2.8.1"
semver-diff@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/semver-diff/-/semver-diff-2.1.0.tgz#4bbb8437c8d37e4b0cf1a68fd726ec6d645d6d36"
@ -5109,6 +5213,12 @@ strip-bom@^3.0.0:
version "3.0.0"
resolved "https://registry.yarnpkg.com/strip-bom/-/strip-bom-3.0.0.tgz#2334c18e9c759f7bdd56fdef7e9ae3d588e68ed3"
strip-dirs@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/strip-dirs/-/strip-dirs-2.1.0.tgz#4987736264fc344cf20f6c34aca9d13d1d4ed6c5"
dependencies:
is-natural-number "^4.0.1"
strip-eof@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf"
@ -5197,6 +5307,15 @@ tar-stream@^1.5.0:
readable-stream "^2.0.0"
xtend "^4.0.0"
tar-stream@^1.5.2:
version "1.5.5"
resolved "https://registry.yarnpkg.com/tar-stream/-/tar-stream-1.5.5.tgz#5cad84779f45c83b1f2508d96b09d88c7218af55"
dependencies:
bl "^1.0.0"
end-of-stream "^1.0.0"
readable-stream "^2.0.0"
xtend "^4.0.0"
tar@^2.0.0:
version "2.2.1"
resolved "https://registry.yarnpkg.com/tar/-/tar-2.2.1.tgz#8e4d2a256c0e2185c6b18ad694aec968b83cb1d1"
@ -5414,6 +5533,13 @@ uglify-to-browserify@~1.0.0:
version "1.0.2"
resolved "https://registry.yarnpkg.com/uglify-to-browserify/-/uglify-to-browserify-1.0.2.tgz#6e0924d6bda6b5afe349e39a6d632850a0f882b7"
unbzip2-stream@^1.0.9:
version "1.2.5"
resolved "https://registry.yarnpkg.com/unbzip2-stream/-/unbzip2-stream-1.2.5.tgz#73a033a567bbbde59654b193c44d48a7e4f43c47"
dependencies:
buffer "^3.0.1"
through "^2.3.6"
underscore.string@~3.2.3:
version "3.2.3"
resolved "https://registry.yarnpkg.com/underscore.string/-/underscore.string-3.2.3.tgz#806992633665d5e5fcb4db1fb3a862eb68e9e6da"
@ -5894,6 +6020,13 @@ yauzl@2.4.1:
dependencies:
fd-slicer "~1.0.1"
yauzl@^2.4.2:
version "2.9.1"
resolved "https://registry.yarnpkg.com/yauzl/-/yauzl-2.9.1.tgz#a81981ea70a57946133883f029c5821a89359a7f"
dependencies:
buffer-crc32 "~0.2.3"
fd-slicer "~1.0.1"
zip-stream@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/zip-stream/-/zip-stream-1.2.0.tgz#a8bc45f4c1b49699c6b90198baacaacdbcd4ba04"