fe_sq2.c (6304B)
#include "fe.h"
#include "crypto_int64.h"

/*
h = 2 * f * f
Can overlap h with f.

Preconditions:
  |f| bounded by 1.65*2^26,1.65*2^25,1.65*2^26,1.65*2^25,etc.

Postconditions:
  |h| bounded by 1.01*2^25,1.01*2^24,1.01*2^25,1.01*2^24,etc.
*/

/*
See fe_mul.c for discussion of implementation strategy.
*/

/*
Representation: an fe is ten crypto_int32 limbs in radix 2^25.5, i.e.
f = f0 + f1*2^26 + f2*2^51 + f3*2^77 + f4*2^102 + f5*2^128 + f6*2^153
  + f7*2^179 + f8*2^204 + f9*2^230 (limb widths alternate 26/25 bits).

This routine is schoolbook squaring: symmetric cross terms f_i*f_j
(i != j) are counted once with a doubled operand (the *_2 values), and
any product that lands at or above 2^255 is folded back using
2^255 = 19 (mod 2^255 - 19) — that is where the *19 and *38 (= 2*19)
coefficients come from.  The final "h += h" pass supplies the extra
factor of 2 in h = 2*f*f.  Everything is straight-line, branch-free
arithmetic, so it runs in constant time regardless of the input value.

NOTE(review): SHL64 is not defined in this file; presumably a macro
from fe.h / crypto_int64.h implementing a 64-bit left shift without
undefined behavior on signed operands — confirm against those headers.
*/
void fe_sq2(fe h,const fe f)
{
  /* Load the ten input limbs. */
  crypto_int32 f0 = f[0];
  crypto_int32 f1 = f[1];
  crypto_int32 f2 = f[2];
  crypto_int32 f3 = f[3];
  crypto_int32 f4 = f[4];
  crypto_int32 f5 = f[5];
  crypto_int32 f6 = f[6];
  crypto_int32 f7 = f[7];
  crypto_int32 f8 = f[8];
  crypto_int32 f9 = f[9];
  /* Doubled limbs: let each symmetric cross product f_i*f_j appear once. */
  crypto_int32 f0_2 = 2 * f0;
  crypto_int32 f1_2 = 2 * f1;
  crypto_int32 f2_2 = 2 * f2;
  crypto_int32 f3_2 = 2 * f3;
  crypto_int32 f4_2 = 2 * f4;
  crypto_int32 f5_2 = 2 * f5;
  crypto_int32 f6_2 = 2 * f6;
  crypto_int32 f7_2 = 2 * f7;
  /* Pre-scaled high limbs for the 2^255 = 19 reduction; the bound
     comments show these still fit comfortably in 32 bits. */
  crypto_int32 f5_38 = 38 * f5; /* 1.959375*2^30 */
  crypto_int32 f6_19 = 19 * f6; /* 1.959375*2^30 */
  crypto_int32 f7_38 = 38 * f7; /* 1.959375*2^30 */
  crypto_int32 f8_19 = 19 * f8; /* 1.959375*2^30 */
  crypto_int32 f9_38 = 38 * f9; /* 1.959375*2^30 */
  /* All 55 limb products, widened to 64 bits via the cast on one
     operand.  Suffix encodes the accumulated constant factor, e.g.
     f1f9_76 = 2*f1 * 38*f9 = 76*f1*f9. */
  crypto_int64 f0f0    = f0   * (crypto_int64) f0;
  crypto_int64 f0f1_2  = f0_2 * (crypto_int64) f1;
  crypto_int64 f0f2_2  = f0_2 * (crypto_int64) f2;
  crypto_int64 f0f3_2  = f0_2 * (crypto_int64) f3;
  crypto_int64 f0f4_2  = f0_2 * (crypto_int64) f4;
  crypto_int64 f0f5_2  = f0_2 * (crypto_int64) f5;
  crypto_int64 f0f6_2  = f0_2 * (crypto_int64) f6;
  crypto_int64 f0f7_2  = f0_2 * (crypto_int64) f7;
  crypto_int64 f0f8_2  = f0_2 * (crypto_int64) f8;
  crypto_int64 f0f9_2  = f0_2 * (crypto_int64) f9;
  crypto_int64 f1f1_2  = f1_2 * (crypto_int64) f1;
  crypto_int64 f1f2_2  = f1_2 * (crypto_int64) f2;
  crypto_int64 f1f3_4  = f1_2 * (crypto_int64) f3_2;
  crypto_int64 f1f4_2  = f1_2 * (crypto_int64) f4;
  crypto_int64 f1f5_4  = f1_2 * (crypto_int64) f5_2;
  crypto_int64 f1f6_2  = f1_2 * (crypto_int64) f6;
  crypto_int64 f1f7_4  = f1_2 * (crypto_int64) f7_2;
  crypto_int64 f1f8_2  = f1_2 * (crypto_int64) f8;
  crypto_int64 f1f9_76 = f1_2 * (crypto_int64) f9_38;
  crypto_int64 f2f2    = f2   * (crypto_int64) f2;
  crypto_int64 f2f3_2  = f2_2 * (crypto_int64) f3;
  crypto_int64 f2f4_2  = f2_2 * (crypto_int64) f4;
  crypto_int64 f2f5_2  = f2_2 * (crypto_int64) f5;
  crypto_int64 f2f6_2  = f2_2 * (crypto_int64) f6;
  crypto_int64 f2f7_2  = f2_2 * (crypto_int64) f7;
  crypto_int64 f2f8_38 = f2_2 * (crypto_int64) f8_19;
  crypto_int64 f2f9_38 = f2   * (crypto_int64) f9_38;
  crypto_int64 f3f3_2  = f3_2 * (crypto_int64) f3;
  crypto_int64 f3f4_2  = f3_2 * (crypto_int64) f4;
  crypto_int64 f3f5_4  = f3_2 * (crypto_int64) f5_2;
  crypto_int64 f3f6_2  = f3_2 * (crypto_int64) f6;
  crypto_int64 f3f7_76 = f3_2 * (crypto_int64) f7_38;
  crypto_int64 f3f8_38 = f3_2 * (crypto_int64) f8_19;
  crypto_int64 f3f9_76 = f3_2 * (crypto_int64) f9_38;
  crypto_int64 f4f4    = f4   * (crypto_int64) f4;
  crypto_int64 f4f5_2  = f4_2 * (crypto_int64) f5;
  crypto_int64 f4f6_38 = f4_2 * (crypto_int64) f6_19;
  crypto_int64 f4f7_38 = f4   * (crypto_int64) f7_38;
  crypto_int64 f4f8_38 = f4_2 * (crypto_int64) f8_19;
  crypto_int64 f4f9_38 = f4   * (crypto_int64) f9_38;
  crypto_int64 f5f5_38 = f5   * (crypto_int64) f5_38;
  crypto_int64 f5f6_38 = f5_2 * (crypto_int64) f6_19;
  crypto_int64 f5f7_76 = f5_2 * (crypto_int64) f7_38;
  crypto_int64 f5f8_38 = f5_2 * (crypto_int64) f8_19;
  crypto_int64 f5f9_76 = f5_2 * (crypto_int64) f9_38;
  crypto_int64 f6f6_19 = f6   * (crypto_int64) f6_19;
  crypto_int64 f6f7_38 = f6   * (crypto_int64) f7_38;
  crypto_int64 f6f8_38 = f6_2 * (crypto_int64) f8_19;
  crypto_int64 f6f9_38 = f6   * (crypto_int64) f9_38;
  crypto_int64 f7f7_38 = f7   * (crypto_int64) f7_38;
  crypto_int64 f7f8_38 = f7_2 * (crypto_int64) f8_19;
  crypto_int64 f7f9_76 = f7_2 * (crypto_int64) f9_38;
  crypto_int64 f8f8_19 = f8   * (crypto_int64) f8_19;
  crypto_int64 f8f9_38 = f8   * (crypto_int64) f9_38;
  crypto_int64 f9f9_38 = f9   * (crypto_int64) f9_38;
  /* Column sums: h_k collects every f_i*f_j whose radix positions add
     to limb k (mod 10), products from k+10 folded down via the
     pre-applied 19/38 factors. */
  crypto_int64 h0 = f0f0  +f1f9_76+f2f8_38+f3f7_76+f4f6_38+f5f5_38;
  crypto_int64 h1 = f0f1_2+f2f9_38+f3f8_38+f4f7_38+f5f6_38;
  crypto_int64 h2 = f0f2_2+f1f1_2 +f3f9_76+f4f8_38+f5f7_76+f6f6_19;
  crypto_int64 h3 = f0f3_2+f1f2_2 +f4f9_38+f5f8_38+f6f7_38;
  crypto_int64 h4 = f0f4_2+f1f3_4 +f2f2   +f5f9_76+f6f8_38+f7f7_38;
  crypto_int64 h5 = f0f5_2+f1f4_2 +f2f3_2 +f6f9_38+f7f8_38;
  crypto_int64 h6 = f0f6_2+f1f5_4 +f2f4_2 +f3f3_2 +f7f9_76+f8f8_19;
  crypto_int64 h7 = f0f7_2+f1f6_2 +f2f5_2 +f3f4_2 +f8f9_38;
  crypto_int64 h8 = f0f8_2+f1f7_4 +f2f6_2 +f3f5_4 +f4f4   +f9f9_38;
  crypto_int64 h9 = f0f9_2+f1f8_2 +f2f7_2 +f3f6_2 +f4f5_2;
  crypto_int64 carry0;
  crypto_int64 carry1;
  crypto_int64 carry2;
  crypto_int64 carry3;
  crypto_int64 carry4;
  crypto_int64 carry5;
  crypto_int64 carry6;
  crypto_int64 carry7;
  crypto_int64 carry8;
  crypto_int64 carry9;

  /* The extra factor of 2 in h = 2*f*f. */
  h0 += h0;
  h1 += h1;
  h2 += h2;
  h3 += h3;
  h4 += h4;
  h5 += h5;
  h6 += h6;
  h7 += h7;
  h8 += h8;
  h9 += h9;

  /* Carry chain: each step rounds h_k to the nearest multiple of its
     limb width (26 or 25 bits, hence the +2^25 / +2^24 bias before the
     arithmetic shift) and pushes the signed carry into h_{k+1}.  The
     interleaved even/odd order and the repeated carry0/carry4 steps
     are exactly what the postcondition bounds in the header require —
     do not reorder. */
  carry0 = (h0 + (crypto_int64) (1<<25)) >> 26; h1 += carry0; h0 -= SHL64(carry0,26);
  carry4 = (h4 + (crypto_int64) (1<<25)) >> 26; h5 += carry4; h4 -= SHL64(carry4,26);

  carry1 = (h1 + (crypto_int64) (1<<24)) >> 25; h2 += carry1; h1 -= SHL64(carry1,25);
  carry5 = (h5 + (crypto_int64) (1<<24)) >> 25; h6 += carry5; h5 -= SHL64(carry5,25);

  carry2 = (h2 + (crypto_int64) (1<<25)) >> 26; h3 += carry2; h2 -= SHL64(carry2,26);
  carry6 = (h6 + (crypto_int64) (1<<25)) >> 26; h7 += carry6; h6 -= SHL64(carry6,26);

  carry3 = (h3 + (crypto_int64) (1<<24)) >> 25; h4 += carry3; h3 -= SHL64(carry3,25);
  carry7 = (h7 + (crypto_int64) (1<<24)) >> 25; h8 += carry7; h7 -= SHL64(carry7,25);

  carry4 = (h4 + (crypto_int64) (1<<25)) >> 26; h5 += carry4; h4 -= SHL64(carry4,26);
  carry8 = (h8 + (crypto_int64) (1<<25)) >> 26; h9 += carry8; h8 -= SHL64(carry8,26);

  /* Top carry wraps around into h0 scaled by 19 (2^255 = 19 mod p). */
  carry9 = (h9 + (crypto_int64) (1<<24)) >> 25; h0 += carry9 * 19; h9 -= SHL64(carry9,25);

  carry0 = (h0 + (crypto_int64) (1<<25)) >> 26; h1 += carry0; h0 -= SHL64(carry0,26);

  /* Limbs are now within the postcondition bounds, so the narrowing
     casts are value-preserving. */
  h[0] = (crypto_int32) h0;
  h[1] = (crypto_int32) h1;
  h[2] = (crypto_int32) h2;
  h[3] = (crypto_int32) h3;
  h[4] = (crypto_int32) h4;
  h[5] = (crypto_int32) h5;
  h[6] = (crypto_int32) h6;
  h[7] = (crypto_int32) h7;
  h[8] = (crypto_int32) h8;
  h[9] = (crypto_int32) h9;
}