batchNormalization.https.any.js (9932B)
// META: title=validation tests for WebNN API batchNormalization operation
// META: global=window
// META: variant=?cpu
// META: variant=?gpu
// META: variant=?npu
// META: script=../resources/utils_validation.js

'use strict';

// Example 2-D input operand descriptor shared by the multi-builder tests below.
const kExampleInputDescriptor = {
  dataType: 'float32',
  shape: [2, 2]
};
// 1D tensor descriptor which may be used for `mean`, `variance`, `scale`, or
// `bias` inputs. Its size matches the input dimension denoted by the default
// axis (1).
const kExample1DTensorDescriptor = {
  dataType: 'float32',
  shape: [kExampleInputDescriptor.shape[/* axis */ 1]]
};

// Each operand passed to batchNormalization() must belong to the same
// MLGraphBuilder; an operand from a different builder must throw a TypeError.
multi_builder_test(async (t, builder, otherBuilder) => {
  const inputFromOtherBuilder =
      otherBuilder.input('input', kExampleInputDescriptor);

  const mean = builder.input('mean', kExample1DTensorDescriptor);
  const variance = builder.input('variance', kExample1DTensorDescriptor);
  assert_throws_js(
      TypeError,
      () => builder.batchNormalization(inputFromOtherBuilder, mean, variance));
}, '[batchNormalization] throw if input is from another builder');

multi_builder_test(async (t, builder, otherBuilder) => {
  const meanFromOtherBuilder =
      otherBuilder.input('mean', kExample1DTensorDescriptor);

  const input = builder.input('input', kExampleInputDescriptor);
  const variance = builder.input('variance', kExample1DTensorDescriptor);
  assert_throws_js(
      TypeError,
      () => builder.batchNormalization(input, meanFromOtherBuilder, variance));
}, '[batchNormalization] throw if mean is from another builder');

multi_builder_test(async (t, builder, otherBuilder) => {
  const varianceFromOtherBuilder =
      otherBuilder.input('variance', kExample1DTensorDescriptor);

  const input = builder.input('input', kExampleInputDescriptor);
  const mean = builder.input('mean', kExample1DTensorDescriptor);
  assert_throws_js(
      TypeError,
      () => builder.batchNormalization(input, mean, varianceFromOtherBuilder));
}, '[batchNormalization] throw if variance is from another builder');

multi_builder_test(async (t, builder, otherBuilder) => {
  const scaleFromOtherBuilder =
      otherBuilder.input('scale', kExample1DTensorDescriptor);
  const options = {scale: scaleFromOtherBuilder};

  const input = builder.input('input', kExampleInputDescriptor);
  const mean = builder.input('mean', kExample1DTensorDescriptor);
  const variance = builder.input('variance', kExample1DTensorDescriptor);
  assert_throws_js(
      TypeError,
      () => builder.batchNormalization(input, mean, variance, options));
}, '[batchNormalization] throw if scale option is from another builder');

multi_builder_test(async (t, builder, otherBuilder) => {
  const biasFromOtherBuilder =
      otherBuilder.input('bias', kExample1DTensorDescriptor);
  // BUGFIX: this previously set `scale`, so the bias-from-another-builder
  // path was never exercised; the foreign operand must go in `bias`.
  const options = {bias: biasFromOtherBuilder};

  const input = builder.input('input', kExampleInputDescriptor);
  const mean = builder.input('mean', kExample1DTensorDescriptor);
  const variance = builder.input('variance', kExample1DTensorDescriptor);
  assert_throws_js(
      TypeError,
      () => builder.batchNormalization(input, mean, variance, options));
}, '[batchNormalization] throw if bias option is from another builder');

// Label attached to the error-path cases below; the thrown error message is
// expected to contain it (matched by the regexp in the test runner).
const label = `batchNormalization_?_123`;

// Table-driven cases. A case with an `output` descriptor is expected to build
// successfully and produce that exact dataType/shape; a case without `output`
// is expected to throw, with the configured label in the error message.
const tests = [
  {
    name: '[batchNormalization] Test with default options.',
    input: {dataType: 'float32', shape: [1, 2, 5, 5]},
    mean: {dataType: 'float32', shape: [2]},
    variance: {dataType: 'float32', shape: [2]},
    output: {dataType: 'float32', shape: [1, 2, 5, 5]},
  },
  {
    name: '[batchNormalization] Test with axis = 2 and epsilon = 0.0001.',
    input: {dataType: 'float32', shape: [1, 2, 5, 5]},
    mean: {dataType: 'float32', shape: [5]},
    variance: {dataType: 'float32', shape: [5]},
    options: {
      axis: 2,
      epsilon: 1e-4,  // 1e-5 is the default value of epsilon.
    },
    output: {dataType: 'float32', shape: [1, 2, 5, 5]},
  },
  {
    name:
        '[batchNormalization] Throw if the input data type is not one of floating point types.',
    input: {dataType: 'int32', shape: [1, 2, 5, 5]},
    mean: {dataType: 'int32', shape: [2]},
    variance: {dataType: 'int32', shape: [2]},
    options: {
      label: label,
    },
  },
  {
    name:
        '[batchNormalization] Throw if the mean data type is not the same as the input data type.',
    input: {dataType: 'float32', shape: [1, 2, 5, 5]},
    mean: {dataType: 'float16', shape: [2]},
    variance: {dataType: 'float32', shape: [2]},
    options: {
      label: label,
    },
  },
  {
    name: '[batchNormalization] Throw if the mean operand is not a 1-D tensor.',
    input: {dataType: 'float32', shape: [1, 2, 5, 5]},
    mean: {dataType: 'float32', shape: [1, 2]},
    variance: {dataType: 'float32', shape: [2]},
    options: {
      label: label,
    },
  },
  {
    name:
        '[batchNormalization] Throw if the size of mean operand is not equal to the size of the input dimension denoted by axis.',
    input: {dataType: 'float32', shape: [1, 2, 5, 5]},
    mean: {dataType: 'float32', shape: [3]},
    variance: {dataType: 'float32', shape: [2]},
    options: {
      axis: 1,
      label: label,
    },
  },
  {
    name:
        '[batchNormalization] Throw if the variance data type is not the same as the input data type.',
    input: {dataType: 'float32', shape: [1, 2, 5, 5]},
    mean: {dataType: 'float32', shape: [2]},
    variance: {dataType: 'float16', shape: [2]},
    options: {
      label: label,
    },
  },
  {
    name:
        '[batchNormalization] Throw if the variance operand is not a 1-D tensor.',
    input: {dataType: 'float32', shape: [1, 2, 5, 5]},
    mean: {dataType: 'float32', shape: [2]},
    variance: {dataType: 'float32', shape: [2, 2]},
    options: {
      label: label,
    },
  },
  {
    name:
        '[batchNormalization] Throw if the size of variance operand is not equal to the size of the input dimension denoted by axis.',
    input: {dataType: 'float32', shape: [1, 2, 5, 5]},
    mean: {dataType: 'float32', shape: [5]},
    variance: {dataType: 'float32', shape: [2]},
    options: {
      axis: 2,
      label: label,
    },
  },
  {
    name:
        '[batchNormalization] Throw if the scale data type is not the same as the input data type.',
    input: {dataType: 'float16', shape: [1, 2, 5, 5]},
    mean: {dataType: 'float16', shape: [2]},
    variance: {dataType: 'float16', shape: [2]},
    options: {
      scale: {dataType: 'float32', shape: [2]},
      label: label,
    },
  },
  {
    name:
        '[batchNormalization] Throw if the scale operand is not a 1-D tensor.',
    input: {dataType: 'float32', shape: [1, 2, 5, 5]},
    mean: {dataType: 'float32', shape: [2]},
    variance: {dataType: 'float32', shape: [2]},
    options: {
      scale: {dataType: 'float32', shape: [2, 1]},
      label: label,
    },
  },
  {
    name:
        '[batchNormalization] Throw if the size of scale operand is not equal to the size of the input dimension denoted by axis.',
    input: {dataType: 'float32', shape: [1, 2, 5, 5]},
    mean: {dataType: 'float32', shape: [5]},
    variance: {dataType: 'float32', shape: [5]},
    options: {
      axis: 2,
      scale: {dataType: 'float32', shape: [2]},
      label: label,
    },
  },
  {
    name:
        '[batchNormalization] Throw if the bias data type is not the same as the input data type.',
    input: {dataType: 'float16', shape: [1, 2, 5, 5]},
    mean: {dataType: 'float16', shape: [2]},
    variance: {dataType: 'float16', shape: [2]},
    options: {
      bias: {dataType: 'float32', shape: [2]},
      label: label,
    },
  },
  {
    name: '[batchNormalization] Throw if the bias operand is not a 1-D tensor.',
    input: {dataType: 'float32', shape: [1, 2, 5, 5]},
    mean: {dataType: 'float32', shape: [2]},
    variance: {dataType: 'float32', shape: [2]},
    options: {
      bias: {dataType: 'float32', shape: [2, 1]},
      label: label,
    },
  },
  {
    name:
        '[batchNormalization] Throw if the size of bias operand is not equal to the size of the input dimension denoted by axis.',
    input: {dataType: 'float32', shape: [1, 2, 5, 5]},
    mean: {dataType: 'float32', shape: [5]},
    variance: {dataType: 'float32', shape: [5]},
    options: {
      axis: 2,
      bias: {dataType: 'float32', shape: [2]},
      label: label,
    },
  },
  {
    name:
        '[batchNormalization] Throw if the value of axis is not in the range of [0,N-1].',
    input: {dataType: 'float32', shape: [1, 2, 5, 5]},
    mean: {dataType: 'float32', shape: [5]},
    variance: {dataType: 'float32', shape: [5]},
    options: {
      axis: 4,
      label: label,
    },
  },
  {
    name: '[batchNormalization] Throw if the input is a scalar.',
    input: {dataType: 'float32', shape: []},
    mean: {dataType: 'float32', shape: [1]},
    variance: {dataType: 'float32', shape: [1]},
    options: {
      axis: 0,
      label: label,
    }
  }
];

tests.forEach(
    test => promise_test(async t => {
      const builder = new MLGraphBuilder(context);
      const input = builder.input('input', test.input);
      const mean = builder.input('mean', test.mean);
      const variance = builder.input('variance', test.variance);

      // Descriptors for optional operands are replaced in place with real
      // MLOperands built from them (each test object is used exactly once).
      if (test.options && test.options.bias) {
        test.options.bias = builder.input('bias', test.options.bias);
      }
      if (test.options && test.options.scale) {
        test.options.scale = builder.input('scale', test.options.scale);
      }

      if (test.output) {
        const output =
            builder.batchNormalization(input, mean, variance, test.options);
        assert_equals(output.dataType, test.output.dataType);
        assert_array_equals(output.shape, test.output.shape);
      } else {
        // Error cases must surface the label set in test.options.label.
        const regexp = /\[batchNormalization_\?_123\]/;
        assert_throws_with_label(
            () =>
                builder.batchNormalization(input, mean, variance, test.options),
            regexp);
      }
    }, test.name));