// layerNormalization.https.any.js (6951B)
// META: title=validation tests for WebNN API layerNormalization operation
// META: global=window
// META: variant=?cpu
// META: variant=?gpu
// META: variant=?npu
// META: script=../resources/utils_validation.js

'use strict';

// Minimal descriptor reused by the cross-builder tests below.
const kExampleInputDescriptor = {
  dataType: 'float32',
  shape: [2, 2]
};

// Shared harness check: the operand passed as `input` must belong to the
// same MLGraphBuilder that creates the op.
validateInputFromAnotherBuilder('layerNormalization');

multi_builder_test(async (t, builder, otherBuilder) => {
  const scaleFromOtherBuilder =
      otherBuilder.input('scale', kExampleInputDescriptor);
  const options = {scale: scaleFromOtherBuilder};

  const input = builder.input('input', kExampleInputDescriptor);
  assert_throws_js(TypeError, () => builder.layerNormalization(input, options));
}, '[layerNormalization] throw if scale option is from another builder');

multi_builder_test(async (t, builder, otherBuilder) => {
  const biasFromOtherBuilder =
      otherBuilder.input('bias', kExampleInputDescriptor);
  const options = {bias: biasFromOtherBuilder};

  const input = builder.input('input', kExampleInputDescriptor);
  assert_throws_js(TypeError, () => builder.layerNormalization(input, options));
}, '[layerNormalization] throw if bias option is from another builder');

// Label attached to ops expected to fail; the driver below matches it in the
// thrown error message. Fixed: was 'instance_normalization', a copy-paste
// from the instanceNormalization test file — this file exercises
// layerNormalization.
const label = 'layer_normalization';

// Declarative cases. A case with an `output` descriptor is expected to build
// successfully (when the backend supports the input data type); a case
// without `output` is expected to throw, with `label` in the error message.
const tests = [
  {
    name: '[layerNormalization] Test with default options for scalar input.',
    input: {dataType: 'float32', shape: []},
    output: {dataType: 'float32', shape: []},
  },
  {
    name: '[layerNormalization] Test when the input data type is float16.',
    input: {dataType: 'float16', shape: []},
    output: {dataType: 'float16', shape: []},
    options: {label}
  },
  {
    name: '[layerNormalization] Test with given axes.',
    input: {dataType: 'float32', shape: [1, 2, 3, 4]},
    options: {
      axes: [3],
    },
    output: {dataType: 'float32', shape: [1, 2, 3, 4]},
  },
  {
    name: '[layerNormalization] Test with given scale.',
    input: {dataType: 'float32', shape: [1, 2, 3, 4]},
    options: {
      scale: {dataType: 'float32', shape: [2, 3, 4]},
    },
    output: {dataType: 'float32', shape: [1, 2, 3, 4]},
  },
  {
    name: '[layerNormalization] Test with a non-default epsilon value.',
    input: {dataType: 'float32', shape: [1, 2, 3, 4]},
    options: {
      epsilon: 1e-4,  // default epsilon=1e-5
    },
    output: {dataType: 'float32', shape: [1, 2, 3, 4]},
  },
  {
    name: '[layerNormalization] Test with given axes, scale and bias.',
    input: {dataType: 'float32', shape: [1, 2, 3, 4]},
    options: {
      scale: {dataType: 'float32', shape: [3, 4]},
      bias: {dataType: 'float32', shape: [3, 4]},
      axes: [2, 3],
    },
    output: {dataType: 'float32', shape: [1, 2, 3, 4]},
  },
  {
    name: '[layerNormalization] Test with nonconsecutive axes.',
    input: {dataType: 'float32', shape: [1, 2, 3, 4, 5, 6]},
    options: {
      scale: {dataType: 'float32', shape: [2, 4, 6]},
      bias: {dataType: 'float32', shape: [2, 4, 6]},
      axes: [1, 3, 5],
    },
    output: {dataType: 'float32', shape: [1, 2, 3, 4, 5, 6]},
  },
  {
    name: '[layerNormalization] Test with axes in descending order.',
    input: {dataType: 'float32', shape: [1, 2, 3, 4, 5, 6]},
    options: {
      scale: {dataType: 'float32', shape: [6, 5, 4, 3, 2]},
      bias: {dataType: 'float32', shape: [6, 5, 4, 3, 2]},
      axes: [5, 4, 3, 2, 1]
    },
    output: {dataType: 'float32', shape: [1, 2, 3, 4, 5, 6]},
  },
  {
    name:
        '[layerNormalization] Throw if the input data type is not one of the floating point types.',
    input: {dataType: 'uint32', shape: [1, 2, 3, 4]},
    options: {label}
  },
  {
    name:
        '[layerNormalization] Throw if the axis is greater than the input rank.',
    input: {dataType: 'float32', shape: [1, 2, 3, 4]},
    options: {
      axes: [1, 2, 4],
      label: label,
    },
  },
  {
    name: '[layerNormalization] Throw if the axes have duplications.',
    input: {dataType: 'float32', shape: [1, 2, 3, 4]},
    options: {
      axes: [3, 3],
      label: label,
    },
  },
  {
    name:
        '[layerNormalization] Throw if the bias data type doesn\'t match input data type',
    input: {dataType: 'float32', shape: [1, 2, 3, 4]},
    options: {
      scale: {dataType: 'float32', shape: [3, 4]},
      bias: {dataType: 'float16', shape: [3, 4]},
      axes: [2, 3],
      label: label,
    },
  },
  {
    name:
        '[layerNormalization] Throw if the scale data type doesn\'t match input data type',
    input: {dataType: 'float32', shape: [1, 2, 3, 4]},
    options: {
      scale: {dataType: 'float16', shape: [3, 4]},
      bias: {dataType: 'float32', shape: [3, 4]},
      axes: [2, 3],
      label: label,
    },
  },
  {
    name:
        '[layerNormalization] Throw if the bias dimensions doesn\'t match axis dimensions.',
    input: {dataType: 'float32', shape: [1, 2, 3, 4]},
    options: {
      bias: {
        dataType: 'float32',
        shape: [3, 3, 4]
      },  // for 4D input, default axes = [1,2,3]
      label: label,
    },
  },
  {
    name:
        '[layerNormalization] Throw if the scale dimensions doesn\'t match axis dimensions.',
    input: {dataType: 'float32', shape: [1, 2, 3, 4]},
    options: {
      scale: {
        dataType: 'float32',
        shape: [3, 3, 4]
      },  // for 4D input, default axes = [1,2,3]
      label: label,
    },
  },
  {
    name:
        '[layerNormalization] Throw if the bias rank doesn\'t match axis rank.',
    input: {dataType: 'float32', shape: [1, 2, 3, 4]},
    options: {
      bias: {
        dataType: 'float32',
        shape: [1, 2, 3, 4]
      },  // for 4D input, default axes = [1,2,3]
      label: label,
    },
  },
  {
    name:
        '[layerNormalization] Throw if the scale rank doesn\'t match axis rank.',
    input: {dataType: 'float32', shape: [1, 2, 3, 4]},
    options: {
      scale: {
        dataType: 'float32',
        shape: [1, 2, 3, 4]
      },  // for 4D input, default axes = [1,2,3]
      label: label,
    },
  },
];

// Driver: builds each case on a fresh MLGraphBuilder. Descriptor literals in
// `options.scale`/`options.bias` are replaced with real operands here.
tests.forEach(
    test => promise_test(async t => {
      const builder = new MLGraphBuilder(context);
      const input = builder.input('input', test.input);

      if (test.options && test.options.bias) {
        test.options.bias = builder.input('bias', test.options.bias);
      }
      if (test.options && test.options.scale) {
        test.options.scale = builder.input('scale', test.options.scale);
      }

      if (test.output &&
          context.opSupportLimits().layerNormalization.input.dataTypes.includes(
              test.input.dataType)) {
        const output = builder.layerNormalization(input, test.options);
        assert_equals(output.dataType, test.output.dataType);
        assert_array_equals(output.shape, test.output.shape);
      } else {
        // Invalid (or backend-unsupported) cases must throw an error whose
        // message carries the configured label, e.g. "[layer_normalization]".
        const regexp = new RegExp('\\[' + label + '\\]');
        assert_throws_with_label(
            () => builder.layerNormalization(input, test.options), regexp);
      }
    }, test.name));