test_bug381412.euc-kr.js (1807B)
/**
 * Regression test for bug 381412 (security): the EUC-KR decoder must never
 * "eat" a two-byte input sequence or collapse it into fewer ASCII characters
 * than it consumed — doing so could let bytes slip past security filters.
 *
 * Strategy: feed every possible (leadByte, trailByte) pair followed by an
 * ASCII sentinel ('A', 0x41) through the converter and inspect the length
 * and content of the Unicode output.
 */
const charset = "EUC-KR";

/**
 * Log the input and output strings as space-separated hex code-unit dumps,
 * e.g. ` xa1 xa1 x41` ==> ` xac02 x41`. Empty output is shown as `<empty>`.
 *
 * @param {string} inString  - raw byte string fed to the converter
 * @param {string} outString - Unicode string the converter produced
 */
function dumpStrings(inString, outString) {
  let dispIn = "";
  let dispOut = "";
  for (let i = 0; i < inString.length; ++i) {
    dispIn += " x" + inString.charCodeAt(i).toString(16);
  }
  if (!outString.length) {
    dispOut = "<empty>";
  } else {
    for (let i = 0; i < outString.length; ++i) {
      dispOut += " x" + outString.charCodeAt(i).toString(16);
    }
  }
  dump('"' + dispIn + '" ==> "' + dispOut + '"\n');
}

/**
 * Report a decoding result that constitutes a security risk: dump the
 * offending input/output pair, then fail the test via do_throw.
 *
 * @param {string} inString  - input byte string
 * @param {string} outString - converter output
 * @param {string} msg       - short description of the risk
 */
function error(inString, outString, msg) {
  dumpStrings(inString, outString);
  do_throw("security risk: " + msg);
}

/**
 * xpcshell entry point: exhaustively probe every two-byte sequence
 * (1..0xFF for each byte) followed by ASCII 'A' and verify the EUC-KR
 * decoder never shortens the sequence into bare ASCII.
 */
function run_test() {
  const ScriptableUnicodeConverter = Components.Constructor(
    "@mozilla.org/intl/scriptableunicodeconverter",
    "nsIScriptableUnicodeConverter"
  );

  const converter = new ScriptableUnicodeConverter();
  converter.charset = charset;

  // Byte 0x00 is skipped: loops start at 1.
  for (let leadByte = 1; leadByte < 0x100; ++leadByte) {
    for (let trailByte = 1; trailByte < 0x100; ++trailByte) {
      // Two candidate bytes plus an ASCII sentinel (0x41 = 'A').
      const inString = String.fromCharCode(leadByte, trailByte, 65);
      // Finish() flushes any pending partial sequence from the decoder.
      const outString =
        converter.ConvertToUnicode(inString) + converter.Finish();
      switch (outString.length) {
        case 1:
          // Only the sentinel survived: both probe bytes disappeared.
          error(inString, outString, "2 byte sequence eaten");
          break;
        case 2:
          // Probe pair collapsed to one character; risky only if the
          // result is plain ASCII (could smuggle syntax characters).
          // NOTE(review): charCodeAt(1) is always the sentinel 'A'
          // (0x41 < 0x80), so the second condition is always true —
          // kept for parity with the original check.
          if (
            outString.charCodeAt(0) < 0x80 &&
            outString.charCodeAt(1) < 0x80
          ) {
            error(inString, outString, "2 byte sequence converted to 1 ASCII");
          }
          break;
        case 3:
          // Same length as input but transformed into different ASCII
          // characters — also a smuggling risk.
          if (
            outString !== inString &&
            outString.charCodeAt(0) < 0x80 &&
            outString.charCodeAt(1) < 0x80
          ) {
            error(inString, outString, "2 byte sequence converted to 2 ASCII");
          }
          break;
      }
    }
  }
}