tor-browser

The Tor Browser
git clone https://git.dasho.dev/tor-browser.git
Log | Files | Refs | README | LICENSE

test_BackupService_archive.js (15253B)


      1 /* Any copyright is dedicated to the Public Domain.
      2 https://creativecommons.org/publicdomain/zero/1.0/ */
      3 
      4 "use strict";
      5 
      6 const { ArchiveEncryptionState } = ChromeUtils.importESModule(
      7  "resource:///modules/backup/ArchiveEncryptionState.sys.mjs"
      8 );
      9 const { ArchiveUtils } = ChromeUtils.importESModule(
     10  "resource:///modules/backup/ArchiveUtils.sys.mjs"
     11 );
     12 const { ArchiveDecryptor } = ChromeUtils.importESModule(
     13  "resource:///modules/backup/ArchiveEncryption.sys.mjs"
     14 );
     15 const { DecoderDecryptorTransformer, FileWriterStream } =
     16  ChromeUtils.importESModule(
     17    "resource:///modules/backup/BackupService.sys.mjs"
     18  );
     19 
// Unique temp directory standing in for a profile; created in add_setup and
// removed by the cleanup function registered there.
let testProfilePath;
// Path of the fake "compressed staging" file that add_setup fills with
// deterministic nonsense bytes; used as the archive payload by most tasks.
let fakeCompressedStagingPath;
// HTML template that createArchive uses to produce the single-file archive.
let archiveTemplateFile = do_get_file("data/test_archive.template.html");
let archiveTemplateURI = Services.io.newFileURI(archiveTemplateFile).spec;

// Deliberately NOT a multiple of 6 so that base64 padding paths are exercised
// (see the longer explanation in add_setup).
const SIZE_IN_BYTES = 125123;
// The deterministic nonsense payload generated in add_setup; extraction tests
// compare round-tripped bytes against this.
let fakeBytes;
     27 
     28 async function assertExtractionsMatch(extractionPath) {
     29  let writtenBytes = await IOUtils.read(extractionPath);
     30  assertUint8ArraysSimilarity(
     31    writtenBytes,
     32    fakeBytes,
     33    true /* expectSimilar */
     34  );
     35 }
     36 
     37 add_setup(async () => {
     38  testProfilePath = await IOUtils.createUniqueDirectory(
     39    PathUtils.tempDir,
     40    "testCreateArchive"
     41  );
     42 
     43  fakeCompressedStagingPath = PathUtils.join(
     44    testProfilePath,
     45    "fake-compressed-staging.zip"
     46  );
     47 
     48  // Let's create a large chunk of nonsense data that we can pretend is the
     49  // compressed archive just to make sure that we can get it back out again.
     50  // Instead of putting a large file inside of version control, we
     51  // deterministically generate some nonsense data inside of a Uint8Array to
     52  // encode. Generating the odd positive integer sequence seems like a decent
     53  // enough mechanism for deterministically generating nonsense data. We ensure
     54  // that the number of bytes written is not a multiple of 6 so that we can
     55  // ensure that base64 padding is working.
     56  fakeBytes = new Uint8Array(SIZE_IN_BYTES);
     57 
     58  // seededRandomNumberGenerator is defined in head.js, but eslint doesn't seem
     59  // happy about it. Maybe that's because it's a generator function.
     60  // eslint-disable-next-line no-undef
     61  let gen = seededRandomNumberGenerator();
     62  for (let i = 0; i < SIZE_IN_BYTES; ++i) {
     63    fakeBytes.set(gen.next().value, i);
     64  }
     65 
     66  await IOUtils.write(fakeCompressedStagingPath, fakeBytes);
     67 
     68  OSKeyStoreTestUtils.setup();
     69 
     70  registerCleanupFunction(async () => {
     71    await OSKeyStoreTestUtils.cleanup();
     72    await maybeRemovePath(testProfilePath);
     73  });
     74 });
     75 
     76 /**
     77 * Tests that a single-file archive can be created from some file on the
     78 * file system and not be encrypted. This is a bit more integration-y, since
     79 * it's also testing the Archive.worker.mjs script - but that script is
     80 * basically an extension of createArchive that lets it operate off of the
     81 * main thread.
     82 */
     83 add_task(async function test_createArchive_unencrypted() {
     84  let bs = new BackupService();
     85 
     86  const FAKE_ARCHIVE_PATH = PathUtils.join(
     87    testProfilePath,
     88    "fake-unencrypted-archive.html"
     89  );
     90 
     91  await bs.createArchive(
     92    FAKE_ARCHIVE_PATH,
     93    archiveTemplateURI,
     94    fakeCompressedStagingPath,
     95    null /* no ArchiveEncryptionState */,
     96    FAKE_METADATA
     97  );
     98 
     99  let { isEncrypted, archiveJSON } = await bs.sampleArchive(FAKE_ARCHIVE_PATH);
    100  Assert.ok(!isEncrypted, "Should not be considered encrypted.");
    101  Assert.deepEqual(
    102    archiveJSON.meta,
    103    FAKE_METADATA,
    104    "Metadata was encoded in the archive JSON block."
    105  );
    106 
    107  const EXTRACTION_PATH = PathUtils.join(testProfilePath, "extraction.bin");
    108  await bs.extractCompressedSnapshotFromArchive(
    109    FAKE_ARCHIVE_PATH,
    110    EXTRACTION_PATH
    111  );
    112 
    113  assertExtractionsMatch(EXTRACTION_PATH);
    114 
    115  await maybeRemovePath(FAKE_ARCHIVE_PATH);
    116  await maybeRemovePath(EXTRACTION_PATH);
    117 });
    118 
    119 /**
    120 * Tests that a single-file archive can be created from some file on the
    121 * file system and be encrypted and decrypted. This is a bit more integration-y,
    122 * since it's also testing the Archive.worker.mjs script - but that script is
    123 * basically an extension of createArchive that lets it operate off of the
    124 * main thread.
    125 */
    126 add_task(async function test_createArchive_encrypted() {
    127  const TEST_RECOVERY_CODE = "This is some recovery code.";
    128 
    129  let bs = new BackupService();
    130  let { instance: encState } =
    131    await ArchiveEncryptionState.initialize(TEST_RECOVERY_CODE);
    132 
    133  const FAKE_ARCHIVE_PATH = PathUtils.join(
    134    testProfilePath,
    135    "fake-encrypted-archive.html"
    136  );
    137 
    138  await bs.createArchive(
    139    FAKE_ARCHIVE_PATH,
    140    archiveTemplateURI,
    141    fakeCompressedStagingPath,
    142    encState,
    143    FAKE_METADATA
    144  );
    145 
    146  let { isEncrypted, archiveJSON } = await bs.sampleArchive(FAKE_ARCHIVE_PATH);
    147  Assert.ok(isEncrypted, "Should be considered encrypted.");
    148  Assert.deepEqual(
    149    archiveJSON.meta,
    150    FAKE_METADATA,
    151    "Metadata was encoded in the archive JSON block."
    152  );
    153 
    154  const EXTRACTION_PATH = PathUtils.join(testProfilePath, "extraction.bin");
    155 
    156  // This should fail, since the archive is encrypted.
    157  await Assert.rejects(
    158    bs.extractCompressedSnapshotFromArchive(FAKE_ARCHIVE_PATH, EXTRACTION_PATH),
    159    /recovery code is required/
    160  );
    161 
    162  await bs.extractCompressedSnapshotFromArchive(
    163    FAKE_ARCHIVE_PATH,
    164    EXTRACTION_PATH,
    165    TEST_RECOVERY_CODE
    166  );
    167 
    168  assertExtractionsMatch(EXTRACTION_PATH);
    169 
    170  await maybeRemovePath(FAKE_ARCHIVE_PATH);
    171  await maybeRemovePath(EXTRACTION_PATH);
    172 });
    173 
    174 /**
    175 * Tests that an archive can be created where the bytes of the archive are
    176 * a multiple of 6, but the individual chunks of those bytes are not a multiple
    177 * of 6 (which will necessitate base64 padding).
    178 */
    179 add_task(async function test_createArchive_multiple_of_six_test() {
    180  let bs = new BackupService();
    181 
    182  const FAKE_ARCHIVE_PATH = PathUtils.join(
    183    testProfilePath,
    184    "fake-unencrypted-archive.html"
    185  );
    186  const FAKE_COMPRESSED_FILE = PathUtils.join(
    187    testProfilePath,
    188    "fake-compressed-staging-mul6.zip"
    189  );
    190 
    191  // Instead of generating a gigantic chunk of data to test this particular
    192  // case, we'll override the default chunk size. We'll choose a chunk size of
    193  // 500 bytes, which doesn't divide evenly by 6 - but we'll encode a set of
    194  // 6 * 500 bytes, which will naturally divide evenly by 6.
    195  const NOT_MULTIPLE_OF_SIX_OVERRIDE_CHUNK_SIZE = 500;
    196  const MULTIPLE_OF_SIX_SIZE_IN_BYTES = 6 * 500;
    197  let multipleOfSixBytes = new Uint8Array(MULTIPLE_OF_SIX_SIZE_IN_BYTES);
    198 
    199  // seededRandomNumberGenerator is defined in head.js, but eslint doesn't seem
    200  // happy about it. Maybe that's because it's a generator function.
    201  // eslint-disable-next-line no-undef
    202  let gen = seededRandomNumberGenerator();
    203  for (let i = 0; i < MULTIPLE_OF_SIX_SIZE_IN_BYTES; ++i) {
    204    multipleOfSixBytes.set(gen.next().value, i);
    205  }
    206 
    207  await IOUtils.write(FAKE_COMPRESSED_FILE, multipleOfSixBytes);
    208 
    209  await bs.createArchive(
    210    FAKE_ARCHIVE_PATH,
    211    archiveTemplateURI,
    212    FAKE_COMPRESSED_FILE,
    213    null /* no ArchiveEncryptionState */,
    214    FAKE_METADATA,
    215    {
    216      chunkSize: NOT_MULTIPLE_OF_SIX_OVERRIDE_CHUNK_SIZE,
    217    }
    218  );
    219 
    220  const EXTRACTION_PATH = PathUtils.join(testProfilePath, "extraction.bin");
    221  await bs.extractCompressedSnapshotFromArchive(
    222    FAKE_ARCHIVE_PATH,
    223    EXTRACTION_PATH
    224  );
    225 
    226  let writtenBytes = await IOUtils.read(EXTRACTION_PATH);
    227  assertUint8ArraysSimilarity(
    228    writtenBytes,
    229    multipleOfSixBytes,
    230    true /* expectSimilar */
    231  );
    232 
    233  await maybeRemovePath(FAKE_COMPRESSED_FILE);
    234  await maybeRemovePath(FAKE_ARCHIVE_PATH);
    235  await maybeRemovePath(EXTRACTION_PATH);
    236 });
    237 
/**
 * Tests that if an encrypted single-file archive has had its binary blob
 * truncated that the decryption fails and the recovery.zip file is
 * automatically destroyed.
 */
add_task(async function test_createArchive_encrypted_truncated() {
  const TEST_RECOVERY_CODE = "This is some recovery code.";

  let bs = new BackupService();
  let { instance: encState } =
    await ArchiveEncryptionState.initialize(TEST_RECOVERY_CODE);

  const FAKE_ARCHIVE_PATH = PathUtils.join(
    testProfilePath,
    "fake-encrypted-archive.html"
  );
  const FAKE_COMPRESSED_FILE = PathUtils.join(
    testProfilePath,
    "fake-compressed-staging-large.zip"
  );

  // Generate a payload spanning exactly two maximum-size chunks, so that the
  // archive is guaranteed to contain more than one encoded chunk line - the
  // truncation below removes one of them.
  const MULTIPLE_OF_MAX_CHUNK_SIZE =
    2 * ArchiveUtils.ARCHIVE_CHUNK_MAX_BYTES_SIZE;
  let multipleOfMaxChunkSizeBytes = new Uint8Array(MULTIPLE_OF_MAX_CHUNK_SIZE);
  // seededRandomNumberGenerator is defined in head.js, but eslint doesn't seem
  // happy about it. Maybe that's because it's a generator function.
  // eslint-disable-next-line no-undef
  let gen = seededRandomNumberGenerator();
  for (let i = 0; i < MULTIPLE_OF_MAX_CHUNK_SIZE; ++i) {
    multipleOfMaxChunkSizeBytes.set(gen.next().value, i);
  }

  await IOUtils.write(FAKE_COMPRESSED_FILE, multipleOfMaxChunkSizeBytes);

  await bs.createArchive(
    FAKE_ARCHIVE_PATH,
    archiveTemplateURI,
    FAKE_COMPRESSED_FILE,
    encState,
    FAKE_METADATA
  );

  // This is a little bit gross - we're going to read out the data from the
  // generated file, find the last line longer than ARCHIVE_CHUNK_MAX_BYTES_SIZE
  // (which should be the last base64 encoded value), and then splice it out,
  // before flushing that change back to disk.
  let lines = (await IOUtils.readUTF8(FAKE_ARCHIVE_PATH)).split("\n");
  let foundIndex = -1;
  // The longest lines will be the base64 encoded chunks. Remove the last one.
  // Scan backwards so we hit the final chunk first.
  for (let i = lines.length - 1; i >= 0; i--) {
    if (lines[i].length > ArchiveUtils.ARCHIVE_CHUNK_MAX_BYTES_SIZE) {
      foundIndex = i;
      break;
    }
  }
  Assert.notEqual(foundIndex, -1, "Should have found a long line");
  lines.splice(foundIndex, 1);
  await IOUtils.writeUTF8(FAKE_ARCHIVE_PATH, lines.join("\n"));

  // The truncated archive should still be recognized as encrypted - the JSON
  // block is intact, only the binary blob is damaged.
  let { isEncrypted } = await bs.sampleArchive(FAKE_ARCHIVE_PATH);
  Assert.ok(isEncrypted, "Should be considered encrypted.");

  const EXTRACTION_PATH = PathUtils.join(testProfilePath, "extraction.bin");

  // Extraction with the correct recovery code should still fail, because the
  // decryptor detects that the ciphertext is incomplete.
  await Assert.rejects(
    bs.extractCompressedSnapshotFromArchive(
      FAKE_ARCHIVE_PATH,
      EXTRACTION_PATH,
      TEST_RECOVERY_CODE
    ),
    /Corrupted archive/
  );

  // The partially-written extraction file must have been deleted on failure.
  Assert.ok(
    !(await IOUtils.exists(EXTRACTION_PATH)),
    "Extraction should have been automatically destroyed."
  );

  await maybeRemovePath(FAKE_ARCHIVE_PATH);
  await maybeRemovePath(FAKE_COMPRESSED_FILE);
});
    319 
/**
 * Tests that if the BinaryReadableStream closes early before the last chunk
 * is decrypted, that the recovery file is destroyed.
 */
add_task(async function test_createArchive_early_binary_stream_close() {
  const TEST_RECOVERY_CODE = "This is some recovery code.";

  let bs = new BackupService();
  let { instance: encState } =
    await ArchiveEncryptionState.initialize(TEST_RECOVERY_CODE);

  const FAKE_ARCHIVE_PATH = PathUtils.join(
    testProfilePath,
    "fake-encrypted-archive.html"
  );

  await bs.createArchive(
    FAKE_ARCHIVE_PATH,
    archiveTemplateURI,
    fakeCompressedStagingPath,
    encState,
    FAKE_METADATA
  );

  let { isEncrypted, startByteOffset, contentType, archiveJSON } =
    await bs.sampleArchive(FAKE_ARCHIVE_PATH);
  Assert.ok(isEncrypted, "Should be considered encrypted.");

  // Manually assemble the same pipeline that extraction uses internally, so
  // that we can wedge an aborting TransformStream into the middle of it.
  let archiveFile = await IOUtils.getFile(FAKE_ARCHIVE_PATH);
  let archiveStream = await bs.createBinaryReadableStream(
    archiveFile,
    startByteOffset,
    contentType
  );
  let decryptor = await ArchiveDecryptor.initialize(
    TEST_RECOVERY_CODE,
    archiveJSON
  );
  const EXTRACTION_PATH = PathUtils.join(testProfilePath, "extraction.bin");

  let binaryDecoder = new TransformStream(
    new DecoderDecryptorTransformer(decryptor)
  );
  let fileWriter = new WritableStream(
    new FileWriterStream(EXTRACTION_PATH, decryptor)
  );

  // We're going to run the characters from the archiveStream through an
  // intermediary TransformStream that is going to cause an abort before the
  // the stream can complete. We'll do that by only passing part of the first
  // chunk through, and then aborting.
  let earlyAborter = new TransformStream({
    async transform(chunkPart, controller) {
      // Forward only the first half of the chunk, then error the stream so
      // downstream stages see an abort mid-chunk.
      controller.enqueue(
        chunkPart.substring(0, Math.floor(chunkPart.length / 2))
      );
      controller.error("We're done. Aborting early.");
    },
  });

  let pipePromise = archiveStream
    .pipeThrough(earlyAborter)
    .pipeThrough(binaryDecoder)
    .pipeTo(fileWriter);

  // The pipe must reject with our abort reason, and the partially-written
  // extraction file must have been cleaned up by the FileWriterStream.
  await Assert.rejects(pipePromise, /Aborting early/);
  Assert.ok(
    !(await IOUtils.exists(EXTRACTION_PATH)),
    "Extraction should have been automatically destroyed."
  );

  await maybeRemovePath(FAKE_ARCHIVE_PATH);
});
    393 
    394 /**
    395 * Tests that if the nsIZipReader fails the CRC check, that the ZIP recovery
    396 * file is destroyed and an exception is thrown.
    397 */
    398 add_task(async function test_createArchive_corrupt_zip() {
    399  let bs = new BackupService();
    400  let corruptZipFile = do_get_file("data/corrupt.zip");
    401  let fakeRecoveryFilePath = await IOUtils.createUniqueDirectory(
    402    PathUtils.tempDir,
    403    "testCreateArchiveCorruptZipSource"
    404  );
    405  const CORRUPT_ZIP_SOURCE = PathUtils.join(
    406    fakeRecoveryFilePath,
    407    "corrupt.zip"
    408  );
    409  await IOUtils.copy(corruptZipFile.path, CORRUPT_ZIP_SOURCE);
    410 
    411  let fakeRecoveryPath = await IOUtils.createUniqueDirectory(
    412    PathUtils.tempDir,
    413    "testCreateArchiveCorruptZipDest"
    414  );
    415 
    416  await Assert.rejects(
    417    bs.decompressRecoveryFile(CORRUPT_ZIP_SOURCE, fakeRecoveryPath),
    418    /Corrupt/
    419  );
    420 
    421  let children = await IOUtils.getChildren(fakeRecoveryPath);
    422  Assert.equal(children.length, 0, "Nothing was decompressed.");
    423  Assert.ok(
    424    !(await IOUtils.exists(CORRUPT_ZIP_SOURCE)),
    425    "Corrupt zip was deleted."
    426  );
    427 });
    428 
    429 /**
    430 * Tests that if the archive file does not contain a JSON block that
    431 * BackupService.sampleArchive will reject.
    432 */
    433 add_task(async function test_missing_JSON_block() {
    434  let bs = new BackupService();
    435  let missingJSONBlockFile = do_get_file("data/missing_json_block.html");
    436  await Assert.rejects(
    437    bs.sampleArchive(missingJSONBlockFile.path),
    438    /Could not find JSON block/
    439  );
    440 });
    441 
    442 /**
    443 * Tests that if the archive file does not contain a binary block that
    444 * BackupService.extractCompressedSnapshotFromArchive will reject.
    445 */
    446 add_task(async function test_missing_binary_block() {
    447  let bs = new BackupService();
    448  let fakeRecoveryPath = await IOUtils.createUniqueDirectory(
    449    PathUtils.tempDir,
    450    "testCreateArchiveMissingBinaryBlockDest"
    451  );
    452 
    453  let missingBinaryBlockFile = do_get_file("data/missing_binary_block.html");
    454  await Assert.rejects(
    455    bs.extractCompressedSnapshotFromArchive(
    456      missingBinaryBlockFile.path,
    457      fakeRecoveryPath
    458    ),
    459    /Could not find binary block/
    460  );
    461 
    462  await maybeRemovePath(fakeRecoveryPath);
    463 });
    464 
    465 /**
    466 * Tests that if the archive file is constructed in such a way that the
    467 * worker ends up breaking Unicode characters in half when finding the
    468 * JSON block, that we can still extract the JSON block.
    469 *
    470 * See bug 1906912.
    471 */
    472 add_task(async function test_broken_unicode_characters() {
    473  let bs = new BackupService();
    474  let specialUnicodeFile = do_get_file("data/break_over_unicode.html");
    475  let { archiveJSON } = await bs.sampleArchive(specialUnicodeFile.path);
    476  Assert.ok(
    477    archiveJSON,
    478    "Was able to extract the JSON from the specially created file with " +
    479      "unicode characters"
    480  );
    481 });