MoveEmitter-riscv64.cpp (11052B)
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 2 -*-
 * vim: set ts=8 sts=2 et sw=2 tw=80:
 * This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this
 * file, You can obtain one at http://mozilla.org/MPL/2.0/. */

#include "jit/riscv64/MoveEmitter-riscv64.h"

#include "jit/MacroAssembler-inl.h"

using namespace js;
using namespace js::jit;

// Emit code that saves the destination of the first move of a cycle into a
// stack spill slot (see cycleSlot()), so the move itself may overwrite it.
// completeCycle() later restores the saved value. |from| is unused here; the
// value that must survive is the one currently held in |to|.
void MoveEmitterRiscv64::breakCycle(const MoveOperand& from,
                                    const MoveOperand& to, MoveOp::Type type,
                                    uint32_t slotId) {
  // There is some pattern:
  // (A -> B)
  // (B -> A)
  //
  // This case handles (A -> B), which we reach first. We save B, then allow
  // the original move to continue.
  switch (type) {
    case MoveOp::FLOAT32:
      if (to.isMemory()) {
        // Memory destination: bounce through the float32 scratch register.
        ScratchFloat32Scope fpscratch32(masm);
        masm.loadFloat32(getAdjustedAddress(to), fpscratch32);
        masm.storeFloat32(fpscratch32, cycleSlot(slotId));
      } else {
        masm.storeFloat32(to.floatReg(), cycleSlot(slotId));
      }
      break;
    case MoveOp::DOUBLE:
      if (to.isMemory()) {
        // Memory destination: bounce through the double scratch register.
        ScratchDoubleScope fpscratch64(masm);
        masm.loadDouble(getAdjustedAddress(to), fpscratch64);
        masm.storeDouble(fpscratch64, cycleSlot(slotId));
      } else {
        masm.storeDouble(to.floatReg(), cycleSlot(slotId));
      }
      break;
    case MoveOp::INT32:
      // NOTE(review): INT32 and GENERAL spill to slot 0 unconditionally,
      // ignoring |slotId|, while the FP cases honor it; completeCycle()
      // asserts slotId == 0 for these types, so integer cycles apparently
      // never use a nonzero slot — confirm against MoveResolver.
      if (to.isMemory()) {
        UseScratchRegisterScope temps(&masm);
        Register scratch2 = temps.Acquire();
        masm.load32(getAdjustedAddress(to), scratch2);
        masm.store32(scratch2, cycleSlot(0));
      } else {
        masm.store32(to.reg(), cycleSlot(0));
      }
      break;
    case MoveOp::GENERAL:
      if (to.isMemory()) {
        UseScratchRegisterScope temps(&masm);
        Register scratch2 = temps.Acquire();
        masm.loadPtr(getAdjustedAddress(to), scratch2);
        masm.storePtr(scratch2, cycleSlot(0));
      } else {
        masm.storePtr(to.reg(), cycleSlot(0));
      }
      break;
    default:
      MOZ_CRASH("Unexpected move type");
  }
}

// Emit code that finishes a cycle: restore the value breakCycle() spilled
// (the original contents of the cycle's first destination) into |to|.
void MoveEmitterRiscv64::completeCycle(const MoveOperand& from,
                                       const MoveOperand& to, MoveOp::Type type,
                                       uint32_t slotId) {
  // There is some pattern:
  // (A -> B)
  // (B -> A)
  //
  // This case handles (B -> A), which we reach last. We emit a move from the
  // saved value of B, to A.
  switch (type) {
    case MoveOp::FLOAT32:
      if (to.isMemory()) {
        // Memory destination: bounce through the float32 scratch register.
        ScratchFloat32Scope fpscratch32(masm);
        masm.loadFloat32(cycleSlot(slotId), fpscratch32);
        masm.storeFloat32(fpscratch32, getAdjustedAddress(to));
      } else {
        masm.loadFloat32(cycleSlot(slotId), to.floatReg());
      }
      break;
    case MoveOp::DOUBLE:
      if (to.isMemory()) {
        // Memory destination: bounce through the double scratch register.
        ScratchDoubleScope fpscratch64(masm);
        masm.loadDouble(cycleSlot(slotId), fpscratch64);
        masm.storeDouble(fpscratch64, getAdjustedAddress(to));
      } else {
        masm.loadDouble(cycleSlot(slotId), to.floatReg());
      }
      break;
    case MoveOp::INT32:
      // Integer cycles always use slot 0 (see breakCycle()).
      MOZ_ASSERT(slotId == 0);
      if (to.isMemory()) {
        UseScratchRegisterScope temps(&masm);
        Register scratch2 = temps.Acquire();
        masm.load32(cycleSlot(0), scratch2);
        masm.store32(scratch2, getAdjustedAddress(to));
      } else {
        masm.load32(cycleSlot(0), to.reg());
      }
      break;
    case MoveOp::GENERAL:
      MOZ_ASSERT(slotId == 0);
      if (to.isMemory()) {
        UseScratchRegisterScope temps(&masm);
        Register scratch2 = temps.Acquire();
        masm.loadPtr(cycleSlot(0), scratch2);
        masm.storePtr(scratch2, getAdjustedAddress(to));
      } else {
        masm.loadPtr(cycleSlot(0), to.reg());
      }
      break;
    default:
      MOZ_CRASH("Unexpected move type");
  }
}

// Emit all resolved moves. If any cycles exist, reserve one 8-byte spill
// slot per cycle up front; the slots are released in finish().
void MoveEmitterRiscv64::emit(const MoveResolver& moves) {
  if (moves.numCycles()) {
    // Reserve stack for cycle resolution
    static_assert(SpillSlotSize == 8);
    masm.reserveStack(moves.numCycles() * SpillSlotSize);
    // Remember the frame depth so cycleSlot() can compute slot addresses
    // even if the stack pointer moves later.
    pushedAtCycle_ = masm.framePushed();
  }

  for (size_t i = 0; i < moves.numMoves(); i++) {
    emit(moves.getMove(i));
  }
}
// Emit a single move, handling cycle bookkeeping before dispatching on the
// move's type.
void MoveEmitterRiscv64::emit(const MoveOp& move) {
  const MoveOperand& from = move.from();
  const MoveOperand& to = move.to();

  if (move.isCycleEnd() && move.isCycleBegin()) {
    // A fun consequence of aliased registers is you can have multiple
    // cycles at once, and one can end exactly where another begins.
    breakCycle(from, to, move.endCycleType(), move.cycleBeginSlot());
    completeCycle(from, to, move.type(), move.cycleEndSlot());
    return;
  }

  if (move.isCycleEnd()) {
    MOZ_ASSERT(inCycle_);
    completeCycle(from, to, move.type(), move.cycleEndSlot());
    MOZ_ASSERT(inCycle_ > 0);
    inCycle_--;
    return;
  }

  if (move.isCycleBegin()) {
    // Spill the destination, then fall through to emit the move itself.
    breakCycle(from, to, move.endCycleType(), move.cycleBeginSlot());
    inCycle_++;
  }

  switch (move.type()) {
    case MoveOp::FLOAT32:
      emitFloat32Move(from, to);
      break;
    case MoveOp::DOUBLE:
      emitDoubleMove(from, to);
      break;
    case MoveOp::INT32:
      emitInt32Move(from, to);
      break;
    case MoveOp::GENERAL:
      emitMove(from, to);
      break;
    default:
      MOZ_CRASH("Unexpected move type");
  }
}

// Pointer-width (GENERAL) move between any combination of register, memory,
// and effective-address operands. Memory-to-memory goes through a scratch
// register.
void MoveEmitterRiscv64::emitMove(const MoveOperand& from,
                                  const MoveOperand& to) {
  if (from.isGeneralReg()) {
    if (to.isGeneralReg()) {
      masm.movePtr(from.reg(), to.reg());
    } else if (to.isMemory()) {
      masm.storePtr(from.reg(), getAdjustedAddress(to));
    } else {
      MOZ_CRASH("Invalid emitMove arguments.");
    }
  } else if (from.isMemory()) {
    if (to.isGeneralReg()) {
      masm.loadPtr(getAdjustedAddress(from), to.reg());
    } else if (to.isMemory()) {
      UseScratchRegisterScope temps(&masm);
      Register scratch2 = temps.Acquire();
      masm.loadPtr(getAdjustedAddress(from), scratch2);
      masm.storePtr(scratch2, getAdjustedAddress(to));
    } else {
      MOZ_CRASH("Invalid emitMove arguments.");
    }
  } else if (from.isEffectiveAddress()) {
    // Source is an address computation (base + disp), not a load.
    if (to.isGeneralReg()) {
      masm.computeEffectiveAddress(getAdjustedAddress(from), to.reg());
    } else if (to.isMemory()) {
      UseScratchRegisterScope temps(&masm);
      Register scratch2 = temps.Acquire();
      masm.computeEffectiveAddress(getAdjustedAddress(from), scratch2);
      masm.storePtr(scratch2, getAdjustedAddress(to));
    } else {
      MOZ_CRASH("Invalid emitMove arguments.");
    }
  } else {
    MOZ_CRASH("Invalid emitMove arguments.");
  }
}

// 32-bit integer move; same operand combinations as emitMove() but using
// 32-bit loads/stores.
void MoveEmitterRiscv64::emitInt32Move(const MoveOperand& from,
                                       const MoveOperand& to) {
  if (from.isGeneralReg()) {
    if (to.isGeneralReg()) {
      masm.move32(from.reg(), to.reg());
    } else if (to.isMemory()) {
      masm.store32(from.reg(), getAdjustedAddress(to));
    } else {
      MOZ_CRASH("Invalid emitInt32Move arguments.");
    }
  } else if (from.isMemory()) {
    if (to.isGeneralReg()) {
      masm.load32(getAdjustedAddress(from), to.reg());
    } else if (to.isMemory()) {
      UseScratchRegisterScope temps(&masm);
      Register scratch2 = temps.Acquire();
      masm.load32(getAdjustedAddress(from), scratch2);
      masm.store32(scratch2, getAdjustedAddress(to));
    } else {
      MOZ_CRASH("Invalid emitInt32Move arguments.");
    }
  } else if (from.isEffectiveAddress()) {
    // Source is an address computation; only the low 32 bits are stored.
    if (to.isGeneralReg()) {
      masm.computeEffectiveAddress(getAdjustedAddress(from), to.reg());
    } else if (to.isMemory()) {
      UseScratchRegisterScope temps(&masm);
      Register scratch2 = temps.Acquire();
      masm.computeEffectiveAddress(getAdjustedAddress(from), scratch2);
      masm.store32(scratch2, getAdjustedAddress(to));
    } else {
      MOZ_CRASH("Invalid emitInt32Move arguments.");
    }
  } else {
    MOZ_CRASH("Invalid emitInt32Move arguments.");
  }
}

// Single-precision float move. Float values may also live in general
// registers a1-a3 when passed as call arguments.
void MoveEmitterRiscv64::emitFloat32Move(const MoveOperand& from,
                                         const MoveOperand& to) {
  if (from.isFloatReg()) {
    if (to.isFloatReg()) {
      masm.fmv_s(to.floatReg(), from.floatReg());
    } else if (to.isGeneralReg()) {
      // This should only be used when passing float parameter in a1,a2,a3
      MOZ_ASSERT(to.reg() == a1 || to.reg() == a2 || to.reg() == a3);
      masm.fmv_x_w(to.reg(), from.floatReg());
    } else {
      MOZ_ASSERT(to.isMemory());
      masm.storeFloat32(from.floatReg(), getAdjustedAddress(to));
    }
  } else if (to.isFloatReg()) {
    MOZ_ASSERT(from.isMemory());
    masm.loadFloat32(getAdjustedAddress(from), to.floatReg());
  } else if (to.isGeneralReg()) {
    MOZ_ASSERT(from.isMemory());
    // This should only be used when passing float parameter in a1,a2,a3
    MOZ_ASSERT(to.reg() == a1 || to.reg() == a2 || to.reg() == a3);
    // NOTE(review): this is an 8-byte loadPtr of a float32 slot, unlike the
    // 4-byte loadFloat32 used above — presumably the slot is 8-byte sized so
    // the extra bytes are harmless, but confirm against the stack layout.
    masm.loadPtr(getAdjustedAddress(from), to.reg());
  } else {
    // Memory-to-memory: bounce through the float32 scratch register.
    MOZ_ASSERT(from.isMemory());
    MOZ_ASSERT(to.isMemory());
    ScratchFloat32Scope fpscratch32(masm);
    masm.loadFloat32(getAdjustedAddress(from), fpscratch32);
    masm.storeFloat32(fpscratch32, getAdjustedAddress(to));
  }
}

// Double-precision float move. Unlike emitFloat32Move(), GPR destinations
// and GPR sources are handled with full 64-bit fmv.x.d / fmv.d.x transfers.
void MoveEmitterRiscv64::emitDoubleMove(const MoveOperand& from,
                                        const MoveOperand& to) {
  if (from.isFloatReg()) {
    if (to.isFloatReg()) {
      masm.fmv_d(to.floatReg(), from.floatReg());
    } else if (to.isGeneralReg()) {
      // Bit-exact transfer of the double's representation into a GPR.
      masm.fmv_x_d(to.reg(), from.floatReg());
    } else {
      MOZ_ASSERT(to.isMemory());
      masm.storeDouble(from.floatReg(), getAdjustedAddress(to));
    }
  } else if (to.isFloatReg()) {
    if (from.isMemory()) {
      masm.loadDouble(getAdjustedAddress(from), to.floatReg());
    } else {
      // GPR source: bit-exact transfer into the FP register.
      masm.fmv_d_x(to.floatReg(), from.reg());
    }
  } else {
    // Memory-to-memory: bounce through the double scratch register.
    MOZ_ASSERT(from.isMemory());
    MOZ_ASSERT(to.isMemory());
    ScratchDoubleScope fpscratch64(masm);
    masm.loadDouble(getAdjustedAddress(from), fpscratch64);
    masm.storeDouble(fpscratch64, getAdjustedAddress(to));
  }
}

// Address of cycle spill slot |slot| (+ |subslot| bytes), relative to the
// current stack pointer. Compensates for SP movement since the slots were
// reserved in emit(const MoveResolver&).
Address MoveEmitterRiscv64::cycleSlot(uint32_t slot, uint32_t subslot) const {
  int32_t offset = masm.framePushed() - pushedAtCycle_;
  return Address(StackPointer, offset + slot * sizeof(double) + subslot);
}

// Displacement of |operand|, corrected for any stack-pointer motion since
// the emitter was constructed (SP-relative operands only).
int32_t MoveEmitterRiscv64::getAdjustedOffset(const MoveOperand& operand) {
  MOZ_ASSERT(operand.isMemoryOrEffectiveAddress());
  if (operand.base() != StackPointer) {
    return operand.disp();
  }

  // Adjust offset if stack pointer has been moved.
  return operand.disp() + masm.framePushed() - pushedAtStart_;
}

// Memory operand as a concrete Address with the adjusted displacement.
Address MoveEmitterRiscv64::getAdjustedAddress(const MoveOperand& operand) {
  return Address(operand.base(), getAdjustedOffset(operand));
}

// All cycles begun must have been completed before finishing.
void MoveEmitterRiscv64::assertDone() { MOZ_ASSERT(inCycle_ == 0); }

// Release any stack reserved for cycle spill slots.
void MoveEmitterRiscv64::finish() {
  assertDone();

  masm.freeStack(masm.framePushed() - pushedAtStart_);
}