test_chunk_growth.js (1416B)
// This file tests SQLITE_FCNTL_CHUNK_SIZE behaves as expected

/**
 * Executes a single SQL statement on the given connection and releases
 * the statement immediately afterwards.
 *
 * @param d   Storage connection (presumably mozIStorageConnection) to run on.
 * @param sql SQL text of the statement to execute.
 */
function run_sql(d, sql) {
  const stmt = d.createStatement(sql);
  stmt.execute();
  stmt.finalize();
}

/**
 * Builds an nsIFile pointing at `name` inside the profile directory
 * ("ProfD"). The file itself need not exist yet.
 *
 * @param name Leaf file name.
 * @returns nsIFile for the requested path.
 */
function new_file(name) {
  const file = Services.dirsvc.get("ProfD", Ci.nsIFile);
  file.append(name);
  return file;
}

/**
 * Returns the current on-disk size, in bytes, of `name` in the profile
 * directory.
 */
function get_size(name) {
  return new_file(name).fileSize;
}

function run_test() {
  const filename = "chunked.sqlite";
  const CHUNK_SIZE = 512 * 1024;
  const d = getDatabase(new_file(filename));
  try {
    d.setGrowthIncrement(CHUNK_SIZE, "");
  } catch (e) {
    // Only NS_ERROR_FILE_TOO_BIG (not enough free disk space to reserve a
    // chunk) is an acceptable reason to skip; rethrow anything else.
    if (e.result !== Cr.NS_ERROR_FILE_TOO_BIG) {
      throw e;
    }
    print("Too little free space to set CHUNK_SIZE!");
    return;
  }
  run_sql(d, "CREATE TABLE bloat(data varchar)");

  const orig_size = get_size(filename);
  /* Dump in at least 32K worth of data.
   * While writing ensure that the file size growth in chunksize set above.
   */
  // 1023 'T' characters — identical to the old `new Array(1024).join("T")`.
  const str1024 = "T".repeat(1023);
  for (let i = 0; i < 32; i++) {
    run_sql(d, `INSERT INTO bloat VALUES('${str1024}')`);
    const size = get_size(filename);
    // Must not grow in small increments.
    Assert.ok(size === orig_size || size >= CHUNK_SIZE);
  }
  /* In addition to growing in chunk-size increments, the db
   * should shrink in chunk-size increments too.
   */
  run_sql(d, "DELETE FROM bloat");
  run_sql(d, "VACUUM");
  Assert.greaterOrEqual(get_size(filename), CHUNK_SIZE);
}